Example #1
void GenerateAffineTransformFromPose(CvSize size, CvAffinePose pose, CvMat* transform)
{
	CvMat* temp = cvCreateMat(3, 3, CV_32FC1);
	CvMat* final = cvCreateMat(3, 3, CV_32FC1);
	cvmSet(temp, 2, 0, 0.0f);
	cvmSet(temp, 2, 1, 0.0f);
	cvmSet(temp, 2, 2, 1.0f);

	CvMat rotation;
	cvGetSubRect(temp, &rotation, cvRect(0, 0, 3, 2));

	cv2DRotationMatrix(cvPoint2D32f(size.width/2, size.height/2), pose.phi, 1.0, &rotation);
	cvCopy(temp, final);

	cvmSet(temp, 0, 0, pose.lambda1);
	cvmSet(temp, 0, 1, 0.0f);
	cvmSet(temp, 1, 0, 0.0f);
	cvmSet(temp, 1, 1, pose.lambda2);
	cvmSet(temp, 0, 2, size.width/2*(1 - pose.lambda1));
	cvmSet(temp, 1, 2, size.height/2*(1 - pose.lambda2));
	cvMatMul(temp, final, final);

	cv2DRotationMatrix(cvPoint2D32f(size.width/2, size.height/2), pose.theta - pose.phi, 1.0, &rotation);
	cvMatMul(temp, final, final);

	cvGetSubRect(final, &rotation, cvRect(0, 0, 3, 2));
	cvCopy(&rotation, transform);

	cvReleaseMat(&temp);
	cvReleaseMat(&final);
}
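Example #1 builds the net map by padding each 2x3 stage (rotate by phi, scale by lambda1/lambda2, rotate by theta - phi) to a 3x3 homogeneous matrix and multiplying. A compact sketch of the same composition idea with the C++ API, assuming OpenCV 2.x or later; composeAffine is an illustrative helper, not a library call:

#include <opencv2/core.hpp>

// Compose two 2x3 affine maps: A is applied first, then B.
cv::Mat composeAffine(const cv::Mat& A, const cv::Mat& B)
{
    cv::Mat A3 = cv::Mat::eye(3, 3, CV_64F);
    cv::Mat B3 = cv::Mat::eye(3, 3, CV_64F);
    A.convertTo(A3.rowRange(0, 2), CV_64F);  // pad to homogeneous form
    B.convertTo(B3.rowRange(0, 2), CV_64F);
    cv::Mat C = B3 * A3;                     // matrix product = composition
    return C.rowRange(0, 2).clone();         // back to 2x3
}

Composing first and warping once also avoids the double interpolation that two separate warps would introduce.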
Example #2
void AffineTransformPatch(IplImage* src, IplImage* dst, CvAffinePose pose)
{
	CvRect src_large_roi = cvGetImageROI(src);

	IplImage* temp = cvCreateImage(cvSize(src_large_roi.width, src_large_roi.height), IPL_DEPTH_32F, src->nChannels);
	cvSetZero(temp);
	IplImage* temp2 = cvCloneImage(temp);
	CvMat* rotation_phi = cvCreateMat(2, 3, CV_32FC1);

	CvSize new_size = cvSize(temp->width*pose.lambda1, temp->height*pose.lambda2);
	IplImage* temp3 = cvCreateImage(new_size, IPL_DEPTH_32F, src->nChannels);

	cvConvertScale(src, temp);
	cvResetImageROI(temp);


	cv2DRotationMatrix(cvPoint2D32f(temp->width/2, temp->height/2), pose.phi, 1.0, rotation_phi);
	cvWarpAffine(temp, temp2, rotation_phi);

	cvSetZero(temp);

	cvResize(temp2, temp3);

	cv2DRotationMatrix(cvPoint2D32f(temp3->width/2, temp3->height/2), pose.theta - pose.phi, 1.0, rotation_phi);
	cvWarpAffine(temp3, temp, rotation_phi);

	cvSetImageROI(temp, cvRect(temp->width/2 - src_large_roi.width/4, temp->height/2 - src_large_roi.height/4,
		src_large_roi.width/2, src_large_roi.height/2));
	cvConvertScale(temp, dst);
	cvReleaseMat(&rotation_phi);

	cvReleaseImage(&temp3);
	cvReleaseImage(&temp2);
	cvReleaseImage(&temp);
}
Example #3
IplImage* rotateImage(const IplImage* img, float angleDegrees)
{
	int height   = img->height;
	int width    = img->width;
	int channels = img->nChannels;

	// Note the swapped dimensions: this helper appears intended for
	// +/-90-degree rotations, where the canvas is transposed.
	IplImage *rotatedImg = cvCreateImage(
							cvSize(height, width),
							IPL_DEPTH_8U,
							channels
							);

	CvPoint2D32f center;
	center.x = width/2.0f;
	center.y = height/2.0f;

	CvMat *mapMatrix = cvCreateMat( 2, 3, CV_32FC1 );

	float x = width - 1.0f;
	float y = 0.0f;

	cv2DRotationMatrix(center, angleDegrees, 1, mapMatrix);
	// Override the translation terms so the result lands on the transposed canvas
	cvmSet(mapMatrix, 0, 2, y);
	cvmSet(mapMatrix, 1, 2, x);
	cvWarpAffine(img, rotatedImg, mapMatrix, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

	cvReleaseMat(&mapMatrix);
	return(rotatedImg);
}
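The pattern in these snippets (build a 2x3 map with cv2DRotationMatrix, apply it with cvWarpAffine) maps one-to-one onto the C++ API. A minimal sketch for reference; cv::getRotationMatrix2D and cv::warpAffine are the documented equivalents, while the wrapper itself is illustrative:

#include <opencv2/imgproc.hpp>

// Rotate an image about its center, keeping the original canvas size.
cv::Mat rotateAboutCenter(const cv::Mat& src, double angleDegrees)
{
    cv::Point2f center(src.cols / 2.0f, src.rows / 2.0f);
    cv::Mat map = cv::getRotationMatrix2D(center, angleDegrees, 1.0);
    cv::Mat dst;
    cv::warpAffine(src, dst, map, src.size(),
                   cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar::all(0));
    return dst;
}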
Example #4
    int InitNextImage(IplImage* img)
    {
        CvSize sz(img->width, img->height);
        ReallocImage(&imgGray, sz, 1);
        ReallocImage(&imgThresh, sz, 1);
        ptRotate = face[MOUTH].ptCenter;
        float m[6];
        CvMat mat = cvMat( 2, 3, CV_32FC1, m );

        if (NULL == imgGray || NULL == imgThresh)
            return 0;

        /*m[0] = (float)cos(-dbRotateAngle*CV_PI/180.);
        m[1] = (float)sin(-dbRotateAngle*CV_PI/180.);
        m[2] = (float)ptRotate.x;
        m[3] = -m[1];
        m[4] = m[0];
        m[5] = (float)ptRotate.y;*/
        cv2DRotationMatrix( cvPointTo32f(ptRotate), -dbRotateAngle, 1., &mat );
        cvWarpAffine( img, imgGray, &mat );

        if (NULL == mstgContours)
            mstgContours = cvCreateMemStorage();
        else
            cvClearMemStorage(mstgContours);
        if (NULL == mstgContours)
            return 0;
        return 1;
    }
Example #5
/*!
    \fn CvFaceSegment::rotate(IplImage *img)
 */
IplImage* CvFaceSegment::rotate(IplImage *img)
{
  int xl = lefteye.x;
  int yl = lefteye.y;
  int xr = righteye.x;
  int yr = righteye.y;
  
  double angle = atan((double)(yr-yl)/(double)(xr-xl));
  angle = 180*angle/CV_PI;
  
  double distance = sqrt((double)(pow((xl-xr),2)+pow((yl-yr),2)));
  int dis = (int)round(distance);
  
  CvMat* map_matrix = cvCreateMat(2,3,CV_32FC1);
  cv2DRotationMatrix( cvPointTo32f( righteye ), angle, 1.0, map_matrix);
  
  IplImage* newimg = cvCreateImage( cvGetSize(img), IPL_DEPTH_8U, 3 );
  cvWarpAffine( img, newimg, map_matrix, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0) );
  
  lefteye.y = righteye.y;
  lefteye.x = righteye.x+dis;
  cvReleaseMat( &map_matrix );
  
  /*
  cvCircle( newimg, righteye, 4, cvScalar(0, 255, 0, 0), 1, 8, 0 );
  cvCircle( newimg, lefteye, 4, cvScalar(0, 255, 0, 0), 1, 8, 0 );
  cvNamedWindow("Scale", CV_WINDOW_AUTOSIZE);
  cvShowImage("Scale", newimg);
  cvWaitKey(0);
  */
  return newimg;
}
Example #6
  void PreProcessor::rotate(const cv::Mat &img_input, cv::Mat &img_output, float angle)
  {
    IplImage* image = new IplImage(img_input); // header only; the pixel data still belongs to img_input

    //IplImage *rotatedImage = cvCreateImage(cvSize(480,320), IPL_DEPTH_8U, image->nChannels);
    //IplImage *rotatedImage = cvCreateImage(cvSize(image->width,image->height), IPL_DEPTH_8U, image->nChannels);
    // Width and height are swapped here, so this path effectively assumes a 90-degree rotation.
    IplImage* rotatedImage = cvCreateImage(cvSize(image->height, image->width), IPL_DEPTH_8U, image->nChannels);

    CvPoint2D32f center;
    //center.x = 160;
    //center.y = 160;
    center.x = (image->height / 2);
    center.y = (image->width / 2);

    CvMat* mapMatrix = cvCreateMat(2, 3, CV_32FC1);

    cv2DRotationMatrix(center, angle, 1.0, mapMatrix);
    cvWarpAffine(image, rotatedImage, mapMatrix, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

    cv::Mat img_rot = cv::cvarrToMat(rotatedImage);
    img_rot.copyTo(img_output);

    delete image; // not cvReleaseImage(): the header was new'ed and the pixel data belongs to img_input
    cvReleaseImage(&rotatedImage);
    cvReleaseMat(&mapMatrix);
  }
Example #7
void COpenCVMFCView::OnWarpAffine()
{
	// TODO: Add your command handler code here

	CvPoint2D32f srcTri[3], dstTri[3];
	CvMat* rot_mat  = cvCreateMat(2,3,CV_32FC1);
	CvMat* warp_mat = cvCreateMat(2,3,CV_32FC1);
	IplImage *src=0, *dst=0;

	src = cvCloneImage(workImg);
	cvFlip(src);
	dst = cvCloneImage(src);
	dst->origin = src->origin;
	cvZero(dst);

	//COMPUTE WARP MATRIX
	srcTri[0].x = 0;                          //src Top left
	srcTri[0].y = 0;
	srcTri[1].x = (float) src->width - 1;     //src Top right
	srcTri[1].y = 0;
	srcTri[2].x = 0;                          //src Bottom left
	srcTri[2].y = (float) src->height - 1;
	//- - - - - - - - - - - - - - -//
	dstTri[0].x = (float)(src->width*0.0);    //dst Top left
	dstTri[0].y = (float)(src->height*0.33);
	dstTri[1].x = (float)(src->width*0.85);   //dst Top right
	dstTri[1].y = (float)(src->height*0.25);
	dstTri[2].x = (float)(src->width*0.15);   //dst Bottom left
	dstTri[2].y = (float)(src->height*0.7);
	cvGetAffineTransform(srcTri,dstTri,warp_mat);
	cvWarpAffine(src,dst,warp_mat);
	cvCopy(dst,src);

	//COMPUTE ROTATION MATRIX
	CvPoint2D32f center = cvPoint2D32f(src->width/2,src->height/2);
	double angle = -50.0;
	double scale = 0.6;
	cv2DRotationMatrix(center,angle,scale,rot_mat);
	cvWarpAffine(src,dst,rot_mat);

	//SHOW THE RESULT:
	cvNamedWindow( "Affine_Transform", 1 );
	cvShowImage( "Affine_Transform", dst );

	m_ImageType = -3;

	cvWaitKey();

	cvDestroyWindow( "Affine_Transform" );
	cvReleaseImage(&src);
	cvReleaseImage(&dst);
	cvReleaseMat(&rot_mat);
	cvReleaseMat(&warp_mat);

	m_ImageType=imageType(workImg);
}
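Example #7 derives its first-stage warp from three point correspondences rather than from an angle; Examples #11 and #26 below do the same. A sketch of that setup with the C++ API (cv::getAffineTransform is the documented equivalent of cvGetAffineTransform; the helper and its corner fractions are illustrative):

#include <opencv2/imgproc.hpp>

// Shear an image by mapping three of its corners to new positions.
cv::Mat shearByCorners(const cv::Mat& src)
{
    cv::Point2f srcTri[3] = {
        cv::Point2f(0.f, 0.f),                      // top left
        cv::Point2f(src.cols - 1.f, 0.f),           // top right
        cv::Point2f(0.f, src.rows - 1.f) };         // bottom left
    cv::Point2f dstTri[3] = {
        cv::Point2f(src.cols * 0.0f,  src.rows * 0.33f),
        cv::Point2f(src.cols * 0.85f, src.rows * 0.25f),
        cv::Point2f(src.cols * 0.15f, src.rows * 0.7f) };
    cv::Mat warp = cv::getAffineTransform(srcTri, dstTri);
    cv::Mat dst;
    cv::warpAffine(src, dst, warp, src.size());
    return dst;
}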
Example #8
IplImage *rotateImage(const IplImage *img, int angle) 
{
    IplImage *newImg;
    int newWidth, newHeight;
    int rectX, rectY;
    if (angle == -45 || angle == 45) {
        newWidth = (int) ((img->width + img->height) / sqrt(2.0));
        newHeight = (int) ((img->width + img->height) / sqrt(2.0));
        
    } else if (angle == -90 || angle == 90) {
        if (img->width > img->height) {
            newWidth = img->width;
            newHeight = img->width;
        } else {
            newWidth = img->height;
            newHeight = img->height;
        }
    } else {
        newWidth = img->width;
        newHeight = img->height;
    }
    
    newImg = cvCreateImage(cvSize(newWidth, newHeight), img->depth,
                           img->nChannels);
    cvSetZero(newImg);
    
    rectX = (int) ((newWidth - img->width) / 2);
    rectY = (int) ((newHeight - img->height) / 2);
    
    CvRect rect = cvRect(rectX, rectY, img->width, img->height);
    
    cvSetImageROI(newImg, rect);
    
    cvResize(img, newImg, CV_INTER_LINEAR);
    cvResetImageROI(newImg);
    
    IplImage *rotatedImg = cvCreateImage(cvGetSize(newImg), IPL_DEPTH_8U,
                                         img -> nChannels);
    
    CvPoint2D32f center;
    int xPos, yPos;
    
    xPos = (int) newWidth / 2;
    yPos = (int) newHeight / 2;
    
    CvMat *mapMatrix = cvCreateMat(2, 3, CV_32FC1);
    
    center.x = xPos;
    center.y = yPos;
    
    cv2DRotationMatrix(center, angle, 1.0, mapMatrix);
    cvWarpAffine(newImg, rotatedImg, mapMatrix, CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS, cvScalarAll(255.f));
    cvReleaseMat(&mapMatrix);
    cvReleaseImage(&newImg); // the padded intermediate canvas is no longer needed
    return rotatedImg;
}
Example #9
void CamShiftPlugin::ProcessStatic
( int i, ImagePlus *img, ImagePlus *oimg, int *hsizes, CvTermCriteria criteria,
IplImage** &planes, CvHistogram* &hist, IplImage* &backproject, CvRect &orect, CvPoint &ocenter, CvRect &searchwin, CvMat* &rotation, CvMat* &shift, bool oready){
	if (hist && hist->mat.dim[0].size!=hsizes[0])
		cvReleaseHist(&hist);
	if( !hist )
        hist = cvCreateHist( 3, hsizes, CV_HIST_ARRAY, NULL, 0);
    if( !backproject )
		backproject = cvCreateImage( cvGetSize(img->orig), IPL_DEPTH_8U, 1 );
	if( !planes ){
	    planes = (IplImage**) malloc(3 * sizeof(IplImage*));
        for (int p=0; p<3; p++)
			planes[p] = cvCreateImage( cvGetSize(img->orig), 8, 1 );
	}
	if (!rotation)
		rotation = cvCreateMat(2,3,CV_32FC1);
	if (!shift)
		shift = cvCreateMat(2,1,CV_32FC1);

	if (!oready){
		orect = cvBoundingRect(oimg->contourArray[i],1);
		cvCvtPixToPlane( oimg->orig, planes[0], planes[1], planes[2], 0 );
        for (int p=0; p<3; p++)
            cvSetImageROI(planes[p],orect);
        cvCalcHist( planes, hist, 0, NULL );
		cvNormalizeHist(hist, 255);
        for (int p=0; p<3; p++)
            cvResetImageROI(planes[p]);
		searchwin = orect; //cvRect(0,0,img->orig->width, img->orig->height);
		ocenter = cvPoint(orect.x+orect.width/2, orect.y+orect.height/2);
	}
	//The following checks shouldn't be needed.
	RestrictRect(searchwin, cvRect(0,0,backproject->width,backproject->height));

	cvCvtPixToPlane( img->orig, planes[0], planes[1], planes[2], 0 );
    cvCalcBackProject( planes, backproject, hist );
	CvBox2D track_box;
	CvConnectedComp track_comp;
    cvCamShift( backproject, searchwin,
                criteria,
                &track_comp, &track_box );
	searchwin = track_comp.rect;
	cvmSet(shift,0,0,track_box.center.x - ocenter.x);
	cvmSet(shift,1,0,track_box.center.y - ocenter.y);
//	shift->data.fl[0] = track_box.center.x - ocenter.x;
//	shift->data.fl[1] = track_box.center.y - ocenter.y;
	cv2DRotationMatrix(track_box.center, track_box.angle, 1.0, rotation);
	cvTransform(oimg->contourArray[i],img->contourArray[i],rotation,shift);
//	CvMat *ofm = FeatPointsToMat(oimg->feats[i]);
//	Cvmat *fm  = FeatPointsToMat(img->feats[i]);
//	cvTransform(ofm,img->contourArray[i],rotation,shift);
	TransformFeatPoints(oimg->feats[i], img->feats[i], rotation, shift);
}
Example #10
void
rotate_image(IplImage *img, double angle, double scale)
{
	CvSize img_size = cvGetSize(img);
	IplImage *tmp = cvCreateImage(img_size,img->depth, img->nChannels);
	CvMat *rotate = cvCreateMat(2,3,CV_32F);
	CvPoint2D32f center = cvPoint2D32f(
			((double)img_size.width)/2.0,
			((double)img_size.height)/2.0);
	cv2DRotationMatrix(center, angle, scale, rotate);
	cvWarpAffine(img, tmp, rotate, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0) );
	cvCopyImage(tmp, img);
	cvReleaseMat(&rotate);
	cvReleaseImage(&tmp);
}
Example #11
int main(int argc, char** argv)
{
   CvPoint2D32f srcTri[3], dstTri[3];
   CvMat* rot_mat = cvCreateMat(2,3,CV_32FC1);
   CvMat* warp_mat = cvCreateMat(2,3,CV_32FC1);
   IplImage *src = 0, *dst = 0; // initialize so the releases below are safe if the load fails
    if( argc == 2 && ((src=cvLoadImage(argv[1],1)) != 0 ))
    {
   dst = cvCloneImage(src);
   dst->origin = src->origin;
   cvZero(dst);

   //COMPUTE WARP MATRIX
   srcTri[0].x = 0;          //src Top left
   srcTri[0].y = 0;
   srcTri[1].x = src->width - 1;    //src Top right
   srcTri[1].y = 0;
   srcTri[2].x = 0;          //src Bottom left
   srcTri[2].y = src->height - 1;
   //- - - - - - - - - - - - - - -//
   dstTri[0].x = src->width*0.0;    //dst Top left
   dstTri[0].y = src->height*0.33;
   dstTri[1].x = src->width*0.85; //dst Top right
   dstTri[1].y = src->height*0.25;
   dstTri[2].x = src->width*0.15; //dst Bottom left
   dstTri[2].y = src->height*0.7;
   cvGetAffineTransform(srcTri,dstTri,warp_mat);
   cvWarpAffine(src,dst,warp_mat);
   cvCopy(dst,src);

   //COMPUTE ROTATION MATRIX
   CvPoint2D32f center = cvPoint2D32f(src->width/2,
                                         src->height/2);
   double angle = -50.0;
   double scale = 0.6;
   cv2DRotationMatrix(center,angle,scale,rot_mat);
   cvWarpAffine(src,dst,rot_mat);

   //SHOW THE RESULT:
   cvNamedWindow( "Affine_Transform", 1 );
      cvShowImage( "Affine_Transform", dst );
      cvWaitKey();
    }
   cvReleaseImage(&src);
   cvReleaseImage(&dst);
   cvReleaseMat(&rot_mat);
   cvReleaseMat(&warp_mat);
    return 0;
}
Example #12
IplImage *rotateImage(const IplImage *src, int angleDegrees, double zoom)
{   
    IplImage *imageRotated = cvCloneImage(src);

   
    CvMat* rot_mat = cvCreateMat(2,3,CV_32FC1);
   
    // Compute rotation matrix
    CvPoint2D32f center = cvPoint2D32f( cvGetSize(imageRotated).width/2, cvGetSize(imageRotated).height/2 );
    cv2DRotationMatrix( center, angleDegrees, zoom, rot_mat );

    // Do the transformation
    cvWarpAffine( src, imageRotated, rot_mat );
   
    cvReleaseMat(&rot_mat);
    return imageRotated;
}
Example #13
void rotateImage(const IplImage* src, IplImage* dst, double degree)
{
    // Only 1-Channel
    if(src->nChannels != 1)
        return;
    
    CvPoint2D32f centralPoint   = cvPoint2D32f(src->width/2, src->height/2); // rotation pivot (the image center)
    CvMat*       rotationMatrix = cvCreateMat(2, 3, CV_32FC1);               // rotation matrix
    
    // Build and store the rotation matrix (to recover the original image, subtract the tilt angle from 90 degrees; adjust as needed)
    cv2DRotationMatrix(centralPoint, degree, 1, rotationMatrix);
    
    // Image rotation
    cvWarpAffine(src, dst, rotationMatrix, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS);
    
    // Free memory
    cvReleaseMat(&rotationMatrix);
}
Example #14
void image_callback(const sensor_msgs::ImageConstPtr& msg)
{
	sensor_msgs::CvBridge bridge;
	try
	{
		last_img = bridge.imgMsgToCv(msg,"bgr8");
		if (count%sampling_rate==0)
		{
			//resize the image to fit the tablet screen exactly
			CvSize size = cvSize(width,heigth);
			IplImage* tmp_img = cvCreateImage(size, last_img->depth, last_img->nChannels);
			cvResize(last_img, tmp_img);
			if (head_position < (-(MAX_HEAD_POS-MIN_HEAD_POS)/2))
			{
				//printf("+++ rotating, head pos: %f\n",head_position);
				IplImage* tmp_img_2 = cvCreateImage(size, last_img->depth, last_img->nChannels);
				CvPoint2D32f pivot = cvPoint2D32f(half_width, half_heigth);
				CvMat* rot_mat = cvCreateMat(2,3,CV_32FC1);
				cv2DRotationMatrix(pivot, 180, 1, rot_mat);
				cvWarpAffine(tmp_img, tmp_img_2, rot_mat);
				sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(tmp_img_2, "bgr8");
				pub.publish(msg);
				cvReleaseMat(&rot_mat);
				cvReleaseImage(&tmp_img_2);
			}
			else
			{
				//printf("+++ normal, head pos: %f",head_position);
				sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(tmp_img, "bgr8");
				pub.publish(msg);
			}
			cvReleaseImage(&tmp_img);
			count=0;
		}
		/*else
			printf("+\n");*/
		count++;
	}
	catch(sensor_msgs::CvBridgeException& e)
	{
		ROS_ERROR("cannot convert");
	}
}	
Example #15
IplImage* rotateImage3(const IplImage* image, float angle)
{ 

	int width = image->width;
	int height = image->height;

	IplImage *rotatedImage = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, image->nChannels);
     
    CvPoint2D32f center;
	center.x = width/2.0f;
	center.y = height/2.0f; 
   
    CvMat* mapMatrix = cvCreateMat(2, 3, CV_32FC1);
    cv2DRotationMatrix(center, angle, 1.0, mapMatrix);

    cvWarpAffine(image, rotatedImage, mapMatrix, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
    cvReleaseMat(&mapMatrix);
     
    return rotatedImage;
}
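All of the fixed-canvas helpers above clip the corners at angles that are not multiples of 90 degrees. The usual remedy is to size the destination to the rotated bounding box and shift the map accordingly, sketched here with the C++ API (assumes OpenCV 2.4+; rotateKeepWhole is an illustrative name):

#include <opencv2/imgproc.hpp>

// Rotate without cropping: enlarge the canvas to the rotated bounding box.
cv::Mat rotateKeepWhole(const cv::Mat& src, double angleDegrees)
{
    cv::Point2f center(src.cols / 2.0f, src.rows / 2.0f);
    cv::Mat map = cv::getRotationMatrix2D(center, angleDegrees, 1.0);
    cv::Rect bbox = cv::RotatedRect(center, src.size(), (float)angleDegrees).boundingRect();
    // Shift so the rotated content lands inside the enlarged canvas.
    map.at<double>(0, 2) += bbox.width  / 2.0 - center.x;
    map.at<double>(1, 2) += bbox.height / 2.0 - center.y;
    cv::Mat dst;
    cv::warpAffine(src, dst, map, bbox.size());
    return dst;
}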
Example #16
//Rotate by an angle measured from the positive horizontal direction; angle is given in radians
void rotate_img(const IplImage* src,IplImage* dst,
				double angle,CvScalar fillVal)
{
	double scale = 1.0;
	double a = 0.0;	
	CvMat* rot_mat = cvCreateMat(2,3,CV_32FC1);
	CvPoint2D32f center = cvPoint2D32f(
		0,
		src->height
		);
	if(fabs(angle) >= 0.2) // fabs, not abs: abs() truncates the double to int
		a= -angle*180/CV_PI/3;
	cv2DRotationMatrix(center,a,scale,rot_mat);
	cvWarpAffine(src,dst,rot_mat,CV_WARP_FILL_OUTLIERS,fillVal);
	
	//copyMakeBorder(dst,dst,3,3,3,3,BORDER_REPLICATE);
	//cvResize(dst,dst,CV_INTER_LINEAR);
	/*cvShowImage("src",src);
	cvShowImage("dst",dst);
	cvWaitKey(0);*/
	cvReleaseMat(&rot_mat);
}
Example #17
typename image<T, D>::create_new rotate(const image<T, D>& a, double angle,
	const float_point<2>& center, double scale, interpolation interp,
	warp_method warp_m, color<T, D::Ch> fill_color)
{
	IplImage* src = a.ipl();
	IplImage* dst = cvCreateImage(cvGetSize(src),
		image_details::ipl_depth<T>(), int(a.channels()));
	CvPoint2D32f cvcenter;
	cvcenter.x = center.x;
	cvcenter.y = center.y;
	CvMat* map_matrix = cvCreateMat(2, 3, image_details::cv_type<double>());
	cv2DRotationMatrix(cvcenter, angle, scale, map_matrix );
	cvWarpAffine(src, dst, map_matrix,
		image_details::interpolation_map[interp]+
		image_details::warp_map[warp_m] ,
		image_details::to_cvscalar(fill_color));
	typename image<T, D>::create_new r(dst);
	cvReleaseMat(&map_matrix);
	cvReleaseImage(&src);
	cvReleaseImage(&dst);
	return r;
}
Example #18
void initialize(struct timeval *past)
{
	char scorechar[10];
	CvMat *rot_mat = cvCreateMat(2, 3, CV_32FC1);
	
	gettimeofday(past, NULL);
	
	// goal of computer handle
	gpc = cpc;
	com_change_goal = 0;
	
	// ball positon
	bpc = cvPoint2D32f(boundw+r0-winx/7,boundh/2);
	
	// ball velocity
	bv.x = 0;// 2004;
	bv.y = 0;//1000;
	
	// explosion effect
	explosr = 0;
	
	// score texture (computer)
	cvSetZero(scoretext1);
	cv2DRotationMatrix(cvPoint2D32f(rotatbox/2,rotatbox/2), -90.0, 1.0, rot_mat);
	if(score[0]>99) score[0] = 99;
	sprintf(scorechar, "%2d", score[0]);
	cvPutText(scoretext1, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontline,   blue);
	cvPutText(scoretext1, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontlight, white);
	// cvWarpAffine cannot warp in place; go through a scratch copy
	IplImage* warped1 = cvCloneImage(scoretext1);
	cvWarpAffine(warped1, scoretext1, rot_mat);
	cvReleaseImage(&warped1);
	
	// score texture (user)
	cvSetZero(scoretext2);
	if(score[1]>99) score[1] = 99;
	sprintf(scorechar, "%d", score[1]);
	cvPutText(scoretext2, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontline,   blue);
	cvPutText(scoretext2, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontlight, white);
	IplImage* warped2 = cvCloneImage(scoretext2);
	cvWarpAffine(warped2, scoretext2, rot_mat);
	cvReleaseImage(&warped2);
	cvReleaseMat(&rot_mat);
}
Example #19
void MapMaker::image_callback(const sensor_msgs::ImageConstPtr& msg) {
//  printf("callback called\n");
  try
	{
	
	// if you want to work with color images, change from mono8 to bgr8
	  if(input_image==NULL){
		  input_image = cvCloneImage(bridge.imgMsgToCv(msg, "mono8"));
		  rotationImage=cvCloneImage(input_image);
		 // printf("cloned image\n");
		}
		else{
		  cvCopy(bridge.imgMsgToCv(msg, "mono8"),input_image);
		 // printf("copied image\n");
		}
	}
	catch (sensor_msgs::CvBridgeException& e)
	{
		ROS_ERROR("Could not convert from '%s' to 'mono8'.", msg->encoding.c_str());
		return;
	}
	
	if(input_image!=NULL) {
    //get tf transform here and put in map
    ros::Time acquisition_time = msg->header.stamp;
    geometry_msgs::PoseStamped basePose;
    geometry_msgs::PoseStamped mapPose;
    basePose.pose.orientation.w=1.0;
    ros::Duration timeout(3);
    basePose.header.frame_id="/base_link";
    mapPose.header.frame_id="/map";
    try {
      tf_listener_.waitForTransform("/base_link", "/map", acquisition_time, timeout);
       
      tf_listener_.transformPose("/map", acquisition_time,basePose,"/base_link",mapPose);
	    
	    printf("pose #%d %f %f %f\n",pic_number,mapPose.pose.position.x, mapPose.pose.position.y, tf::getYaw(mapPose.pose.orientation));
	    
	    
	    /*
	    char buffer [50];
	    sprintf (buffer, "/tmp/test%02d.jpg", pic_number);
			if(!cvSaveImage(buffer,input_image,0)) printf("Could not save: %s\n",buffer);
			else printf("picture taken!!!\n");
	    pic_number++;
	    */
	    
	    cv::Point_<double> center;
      center.x=input_image->width/2;
      center.y=input_image->height/2;
      double translation_arr[2][3];
      CvMat translation;
      cvInitMatHeader(&translation, 2, 3, CV_64F, translation_arr);
      
      cvSetZero(&translation);
      cv2DRotationMatrix(center, (tf::getYaw(mapPose.pose.orientation)*180/CV_PI) - 90, 1.0, &translation);
      cvSetZero(rotationImage);
      cvWarpAffine(input_image,rotationImage,&translation,CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS,cvScalarAll(0));
      
      
      CvRect roi;
      roi.width=rotationImage->width;
      roi.height=rotationImage->height;
      
      if(init_zero_x==0){
        init_zero_x=(int)(mapPose.pose.position.x*(1.0/map_meters_per_pixel));
        init_zero_y=(int)(mapPose.pose.position.y*(-1.0/map_meters_per_pixel));
      }
      
      roi.x=(int)(mapPose.pose.position.x*(1.0/map_meters_per_pixel))-init_zero_x+map_zero_x-roi.width/2;
      roi.y=(int)(mapPose.pose.position.y*(-1.0/map_meters_per_pixel))-init_zero_y+map_zero_y-roi.height/2;
      
      printf("x %d, y %d, rot %f\n",roi.x,roi.y, (tf::getYaw(mapPose.pose.orientation)*180/3.14159) -90);
      
      cvSetImageROI(map,roi);
      
      cvMax(map,rotationImage,map);
      
      cvResetImageROI(map);
	    cvShowImage("map image",map);	    
    }
    catch (tf::TransformException& ex) {
      ROS_WARN("[map_maker] TF exception:\n%s", ex.what());
      printf("[map_maker] TF exception:\n%s", ex.what());
      return;
    }
    catch(...){
      printf("opencv shit itself cause our roi is bad\n");
    }
  }
}
Example #20
int main (int argc, const char * argv[])
{
    if ( argc != 2 )
    {
        fprintf(stderr, "Usage: <image>\n");
        exit(1);
    }

    IplImage* image = cvLoadImage(argv[1], CV_LOAD_IMAGE_GRAYSCALE);

    if ( image == NULL )
    {
        fprintf(stderr, "Couldn't load image %s\n", argv[1]);
        exit(1);
    }

    IplImage* dst = cvCloneImage(image);
    //cvSetZero(dst);

    CvMat* rotation = cvCreateMat(2, 3, CV_32FC1);

#if 0
    // Optimized for finding 3-pixel-wide lines.
    float zeroDegreeLineData[] = {
        -10, -10, -10, -10, -10,
        3, 3, 3, 3, 3,
        14, 14, 14, 14, 14,
        3, 3, 3, 3, 3,
        -10, -10, -10, -10, -10
    };
#if 0
    float zeroDegreeLineData[] = {
        10, 10, 10, 10, 10,
        -3, -3, -3, -3, -3,
        -14, -14, -14, -14, -14,
        -3, -3, -3, -3, -3,
        10, 10, 10, 10, 10
    };
#endif

    CvMat zeroDegreeLine = cvMat(5, 5, CV_32FC1, zeroDegreeLineData);
    PrintMat("Zero Degree Line", &zeroDegreeLine);

    cv2DRotationMatrix(cvPoint2D32f(2,2), 60.0, 1.0, rotation);

    CvMat* kernel = cvCreateMat(5, 5, CV_32FC1);

#else
    // Optimized for finding 1-pixel-wide lines. The sum of all coefficients is 0, so this kernel
    // tends to push uniform regions towards zero.
#if 0
    float zeroDegreeLineData[] = {
        10, 10, 10,
        -20, -20, -20,
        10, 10, 10
    };
#elif 0
    float zeroDegreeLineData[] = {
        -10, -10, -10,
        20, 20, 20,
        -10, -10, -10
    };
#else
    // Line detector optimized to find a horizontal line 1 pixel wide that is darker (smaller value) than its
    // surrounding pixels. This works because darker (smaller value) 1-pixel-wide horizontal lines contribute a
    // smaller-magnitude negative component, so their convolved value ends up higher than that of the surrounding
    // pixels. See Convolution.numbers for a simple example of how this works.
    float zeroDegreeLineData[] = {
        1, 1, 1,
        -2, -2, -2,
        1, 1, 1
    };
#endif

    CvMat zeroDegreeLine = cvMat(3, 3, CV_32FC1, zeroDegreeLineData);
    PrintMat("Zero Degree Line", &zeroDegreeLine);

    // Rotate the horizontal line-detecting kernel by 60 degrees so that it detects 60-degree lines.
    cv2DRotationMatrix(cvPoint2D32f(1,1), 60.0, 1.0, rotation);

    CvMat* kernel = cvCreateMat(3, 3, CV_32FC1);

#endif

    PrintMat("Rotation", rotation);

    cvWarpAffine(&zeroDegreeLine, kernel, rotation, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
    PrintMat("Kernel", kernel);

    cvFilter2D( image, dst, kernel, cvPoint(-1,-1));

    cvNamedWindow("main", CV_WINDOW_NORMAL);
    cvShowImage("main", image);
    cvWaitKey(0);

    cvShowImage("main", dst);
    cvWaitKey(0);

    return 0;
}
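One caveat with Example #20's approach: bilinear interpolation during the warp perturbs the kernel's zero sum, so the rotated kernel no longer exactly suppresses uniform regions. A small touch-up in the same C API is to subtract the mean afterwards (rezeroKernel is an illustrative helper; cvSum and cvConvertScale are documented calls):

// Re-center the coefficients so the warped kernel sums to zero again.
void rezeroKernel(CvMat* kernel)
{
    CvScalar s = cvSum(kernel);
    double n = (double)(kernel->rows * kernel->cols);
    cvConvertScale(kernel, kernel, 1.0, -s.val[0] / n); // subtract the mean
}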
Example #21
//Show the ROI image, and save the ROI image and its coordinates
void Harrlike::SaveROI(IplImage *Img,CvSeq *faces)
{
	CvSize new_size = cvSize(m_width,m_height);
	CvRect ROI;
	double  Ratio = Img->width/1000.;
	double  m,angle;
	int ratioX = 2,ratioY = 2;
	
	//Rotate
	if((m_TraningCoord[7].x - m_TraningCoord[1].x)!=0)
	{
	   m = static_cast<double>(m_TraningCoord[7].y - m_TraningCoord[1].y)/(m_TraningCoord[7].x - m_TraningCoord[1].x);
	   angle = atan(m) * 180.0 / CV_PI; // radians to degrees
	}
	else
	{
		angle = 0;
	}
	CvPoint2D32f  src_center  = cvPoint2D32f(m_TraningCoord[1].x*1.0,  m_TraningCoord[1].y*1.0);    
	
	float map[6];
	CvMat rot_mat = cvMat( 2, 3, CV_32F,map);
	cv2DRotationMatrix( src_center, angle,1.0, &rot_mat);  
	IplImage* dst = cvCreateImage(cvSize(Img->width,Img->height),8,1);
    cvWarpAffine(Img, dst, &rot_mat);
	//cvShowImage("2",dst);
	//cvWaitKey(0);
	double Coord3X = m_TraningCoord[3].x*cvGetReal2D(&rot_mat,0,0) + m_TraningCoord[3].y*cvGetReal2D(&rot_mat,0,1) + cvGetReal2D(&rot_mat,0,2)*1.0;
	double Coord3Y = m_TraningCoord[3].x*cvGetReal2D(&rot_mat,1,0) + m_TraningCoord[3].y*cvGetReal2D(&rot_mat,1,1) + cvGetReal2D(&rot_mat,1,2)*1.0;
	double Coord9X = m_TraningCoord[9].x*cvGetReal2D(&rot_mat,0,0) + m_TraningCoord[9].y*cvGetReal2D(&rot_mat,0,1) + cvGetReal2D(&rot_mat,0,2)*1.0;
	double Coord9Y = m_TraningCoord[9].x*cvGetReal2D(&rot_mat,1,0) + m_TraningCoord[9].y*cvGetReal2D(&rot_mat,1,1) + cvGetReal2D(&rot_mat,1,2)*1.0;
	double Coord1X = m_TraningCoord[1].x*cvGetReal2D(&rot_mat,0,0) + m_TraningCoord[1].y*cvGetReal2D(&rot_mat,0,1) + cvGetReal2D(&rot_mat,0,2)*1.0;
	double Coord0Y = m_TraningCoord[0].x*cvGetReal2D(&rot_mat,1,0) + m_TraningCoord[0].y*cvGetReal2D(&rot_mat,1,1) + cvGetReal2D(&rot_mat,1,2)*1.0;

	// middle of the nose

	double middleX = (Coord3X + Coord9X)*0.5;
	double middleY = (Coord3Y + Coord9Y)*0.5;
	// crop out the face
	ROI.x = Coord1X;
	ROI.y = Coord0Y;
	ROI.width  = (middleX-ROI.x)*ratioX;//600
	ROI.height = (middleY - Coord0Y)*ratioY;//m_TraningCoord[11].y-m_TraningCoord[0].y
	
	cvSetImageROI(dst, ROI);
	ResizeImage(dst,new_size);
	
	/*double multipleX = (m_width*1.0/ROI.width*1.0);
	double multipleY = (m_height*1.0/ROI.height*1.0);

	FILE *NormalizeCoord = fopen("..\\DataBase\\NormalizeCoord.xls","a");
	for(int i = 0;i < 12;i++)
	{
	int normalizeX = (m_TraningCoord[i].x - ROI.x)*multipleX;
	int normalizeY = (m_TraningCoord[i].y - ROI.y)*multipleY;
	if(normalizeX > m_width) normalizeX = m_width;
	if(normalizeY > m_height) normalizeY = m_height;
	if(normalizeX < 0)   normalizeX = 0;
	if(normalizeY < 0)   normalizeY = 0;
	fprintf(NormalizeCoord,"%d\t%d\t",normalizeX,normalizeY);
	}
	fprintf(NormalizeCoord,"\n");
	fclose(NormalizeCoord);*/

	FILE *Coord = fopen("..\\DataBase\\Coordinate.xls","a");
	for(int i = 0;i < 12;i++)
	{
		fprintf(Coord,"%d\t%d\t",m_TraningCoord[i].x,m_TraningCoord[i].y);
	}
	fprintf(Coord,"\n");
	fclose(Coord);
	//cvReleaseImage(&Img);
	//cvSaveImage(filename,Img);
	//cvShowImage("roi",Img);
	//cvWaitKey(0);
	cvReleaseImage(&dst);
}
Example #22
bool EyeCoord2FaceCrop( IplImage * pic8, CvMat * faceImg8,  CvPoint2D32f leftEye, 
					   CvPoint2D32f rightEye, bool useBuf )
{
	static int idx = 0;
	static bool bInited = false;
	CvPoint2D32f l1 = cvPoint2D32f(0,0), r1 = cvPoint2D32f(0,0);
	if (useBuf) // when detecting faces in a video stream the crop should move smoothly, so we average the eye coordinates over the last few calls
	{
		g_lefta[idx] = leftEye;
		g_righta[idx++] = rightEye;
		idx %= g_nFiltLevel;

		if (!bInited)
		{
			for (int i = 1; i < g_nFiltLevel; i++)
			{
				g_lefta[i] = leftEye;
				g_righta[i] = rightEye;
			}
			bInited = true; // otherwise the history is re-seeded on every call and no smoothing happens
		}

		for (int i = 0; i < g_nFiltLevel; i++) // smooth the coordinates
		{
			l1.x += g_lefta[i].x/g_nFiltLevel;
			l1.y += g_lefta[i].y/g_nFiltLevel;
			r1.x += g_righta[i].x/g_nFiltLevel;
			r1.y += g_righta[i].y/g_nFiltLevel;
		}
	}
	else
	{
		l1 = leftEye;
		r1 = rightEye;
	}

	float xDis = r1.x - l1.x,
		yDis = r1.y - l1.y;

	g_angle = cvFastArctan(yDis, xDis);
	g_dis = sqrt(xDis*xDis + yDis*yDis);


	CvMat *map = cvCreateMat(2, 3, CV_32FC1);
	CvMat *largePic8 = cvCreateMat(pic8->height*2, pic8->width*2, CV_8UC1); // in case the cropped face goes out of the border
	CvMat *tmpDst = cvCreateMat(largePic8->height, largePic8->width, CV_8UC1);
	cvCopyMakeBorder(pic8, largePic8, cvPoint(pic8->width/2, pic8->height/2), IPL_BORDER_REPLICATE);

	l1.x += pic8->width/2;
	l1.y += pic8->height/2;
	cv2DRotationMatrix(l1, g_angle, g_normDis/g_dis, map); // similarity transform: rotate about the left eye and scale to the canonical eye distance
	//DispCvArr(map, "map");
	cvWarpAffine(largePic8, tmpDst, map);
	//cvShowImage("a",tmpDst);
	//cvWaitKey();

	int		leftEyeXNew = cvRound((g_faceSz.width - g_normDis)/2);
	int		left = cvRound(l1.x - leftEyeXNew),
		top = cvRound(l1.y - g_normRow);
	CvMat	tmpHeader, *sub = 0;

	if (left >= 0 && top >= 0 &&
		left + g_faceSz.width <= tmpDst->width &&
		top + g_faceSz.height <= tmpDst->height)
	{	
		sub = cvGetSubRect(tmpDst, &tmpHeader, cvRect(left, top, g_faceSz.width, g_faceSz.height));
		cvCopy(sub, faceImg8);
		//cvShowImage("f",faceImg8);
		//cvWaitKey();
	}

	cvReleaseMat(&map);
	cvReleaseMat(&largePic8);
	cvReleaseMat(&tmpDst);
	return (sub != 0);
}
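Example #22's map is a similarity transform: a single cv2DRotationMatrix call rotates about the left eye and rescales so the inter-ocular distance matches the template. The same idea as a C++-API sketch (alignByEyes and targetEyeDist are illustrative names):

#include <opencv2/imgproc.hpp>
#include <cmath>

// Rotate about the left eye and scale so the eye distance becomes targetEyeDist.
cv::Mat alignByEyes(const cv::Mat& face, cv::Point2f leftEye, cv::Point2f rightEye,
                    double targetEyeDist)
{
    double dx = rightEye.x - leftEye.x, dy = rightEye.y - leftEye.y;
    double angle = std::atan2(dy, dx) * 180.0 / CV_PI;   // eye-line tilt in degrees
    double scale = targetEyeDist / std::sqrt(dx * dx + dy * dy);
    cv::Mat map = cv::getRotationMatrix2D(leftEye, angle, scale);
    cv::Mat dst;
    cv::warpAffine(face, dst, map, face.size());
    return dst;
}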
Example #23
bool  FaceCrop::Crop(CvPoint LE , CvPoint RE , CvPoint No , CvPoint Mo){
	
	CvPoint eyeVector;
	double rotation=0.0;
	IplImage* rotationImg=cvCreateImage(cvSize(srcImg->width,srcImg->height),IPL_DEPTH_8U,3);
	double eyeDistance;

	eyeVector.x=LE.x-RE.x;
	eyeVector.y=LE.y-RE.y;
	eyeDistance=sqrt((double)(eyeVector.x*eyeVector.x + eyeVector.y*eyeVector.y));
	rotation=atan2((double)eyeVector.y , (double)eyeVector.x) * 180 / CV_PI+180;
	

	CvMat *matrix=NULL;
	if(rotation > maxRotationAngle){
		matrix=cvCreateMat(2,3,CV_32FC1);
		matrix=cv2DRotationMatrix(cvPoint2D32f(LE.x,LE.y),rotation,1,matrix);
		cvWarpAffine( srcImg,rotationImg,matrix,CV_WARP_FILL_OUTLIERS,cvScalarAll(0) );
	}
	else{
		cvCopy(srcImg,rotationImg);
	}
	

	eyeDistance=(int)eyeDistance;
	int x=LE.x-(int)(a*eyeDistance);
	int y=LE.y-(int)(b*eyeDistance);
	int width= (int)(a*eyeDistance)+(int)(a*eyeDistance)+eyeDistance+1;
	int height=(int)(b*eyeDistance)+(int)(c*eyeDistance)+1;
	if(x<0)x=0;
	if(y<0)y=0;
	if(x+width>=rotationImg->width)width=rotationImg->width-x-1;
	if(y+height>=rotationImg->height)height=rotationImg->height-y-1;

	cvSetImageROI(rotationImg,cvRect(x , y , width , height));
	cropImg=cvCreateImage(cvSize(width,height),IPL_DEPTH_8U,3);
	cvCopy(rotationImg,cropImg);
	cvResetImageROI(rotationImg);

	normalizeImg=cvCreateImage(cvSize(normalizeWidth,normalizeHeight),IPL_DEPTH_8U,3);
	cvResize(cropImg,normalizeImg);
	
	if(matrix!=NULL){

		matrix=cv2DRotationMatrix(cvPoint2D32f(LE.x,LE.y),-rotation,1,matrix);

		CvMat *pointMatrix=cvCreateMat(3,1,CV_32FC1);
		CvMat *rotatedPointMatrix=cvCreateMat(2,1,CV_32FC1);
		cvmSet(pointMatrix,0,0,x);
		cvmSet(pointMatrix,1,0,y);
		cvmSet(pointMatrix,2,0,1);
		cvmMul(matrix,pointMatrix,rotatedPointMatrix);
		leftTop.x=cvmGet(rotatedPointMatrix,0,0);
		leftTop.y=cvmGet(rotatedPointMatrix,1,0);
		
		cvmSet(pointMatrix,0,0,x+width);
		cvmSet(pointMatrix,1,0,y);
		cvmMul(matrix,pointMatrix,rotatedPointMatrix);
		rightTop.x=cvmGet(rotatedPointMatrix,0,0);
		rightTop.y=cvmGet(rotatedPointMatrix,1,0);
		
		cvmSet(pointMatrix,0,0,x);
		cvmSet(pointMatrix,1,0,y+height);
		cvmMul(matrix,pointMatrix,rotatedPointMatrix);
		leftdown.x=cvmGet(rotatedPointMatrix,0,0);
		leftdown.y=cvmGet(rotatedPointMatrix,1,0);
		
		cvmSet(pointMatrix,0,0,x+width);
		cvmSet(pointMatrix,1,0,y+height);
		cvmMul(matrix,pointMatrix,rotatedPointMatrix);
		rightdown.x=cvmGet(rotatedPointMatrix,0,0);
		rightdown.y=cvmGet(rotatedPointMatrix,1,0);

		cvReleaseMat(&pointMatrix);
		cvReleaseMat(&rotatedPointMatrix);
	}
	else{
		leftTop.x=x;
		leftTop.y=y; 
		rightTop.x=x+width;
		rightTop.y=y;
		leftdown.x=x;
		leftdown.y=y+height;
		rightdown.x=x+width;
		rightdown.y=y+height;
	}
	
	//cvSaveImage("result.jpg",cropImg);

	//cvNamedWindow("Img",1);
	//cvShowImage("Img",cropImg);
	//cvWaitKey(0);

	cvReleaseImage(&rotationImg);
	cvReleaseImage(&cropImg);
	if(matrix!=NULL)
		cvReleaseMat(&matrix);

	return true;

	
}
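Example #23 maps the crop corners back to the original frame by rebuilding the rotation with a negated angle, which works here because the scale is 1 and the center is unchanged. The C++ API offers a direct inverse for the general case, sketched below (cv::invertAffineTransform and cv::transform are documented calls; backProject is an illustrative name):

#include <opencv2/imgproc.hpp>
#include <vector>

// Map points from the warped image back into the original image's frame.
std::vector<cv::Point2f> backProject(const std::vector<cv::Point2f>& pts,
                                     const cv::Mat& forwardMap /* 2x3 */)
{
    cv::Mat inverseMap;
    cv::invertAffineTransform(forwardMap, inverseMap);
    std::vector<cv::Point2f> out;
    cv::transform(pts, out, inverseMap);
    return out;
}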
Example #24
//Collage an image onto the left upper arm (LEFT_ELBOW - LEFT_SHOULDER)
void drawLeftArm(xn::DepthGenerator& depth, xn::SkeletonCapability& capability, XnUserID user, XnMapOutputMode mapMode, IplImage* preLoadImage, IplImage **rgbImage) {
	XnSkeletonJointPosition pos1,pos2;
  
    IplImage *partImage = NULL; 
    IplImage *fallImage = NULL; 
    
	// Get the elbow joint position
	capability.GetSkeletonJointPosition(user, XN_SKEL_LEFT_ELBOW, pos1);
	XnPoint3D pReal1[1] = {pos1.position};
	XnPoint3D pProjective1[1];
	// Convert from world coordinates to projective (screen) coordinates
	depth.ConvertRealWorldToProjective(1, pReal1, pProjective1);
    
    // Get the shoulder joint position
	capability.GetSkeletonJointPosition(user, XN_SKEL_LEFT_SHOULDER, pos2);
	XnPoint3D pReal2[1] = {pos2.position};
	XnPoint3D pProjective2[1];
	// Convert from world coordinates to projective (screen) coordinates
	depth.ConvertRealWorldToProjective(1, pReal2, pProjective2);
    
    // Image to overlay
    partImage = preLoadImage;
    fallImage = cvCreateImage(cvSize(mapMode.nXRes, mapMode.nYRes), IPL_DEPTH_8U, 3);    
    CvPoint2D32f original[3];
    CvPoint2D32f transform[3];
    
    original[0] = cvPoint2D32f(0, 0);
    original[1] = cvPoint2D32f(mapMode.nXRes, 0);
    original[2] = cvPoint2D32f( 0, mapMode.nYRes);
    
    CvSize sizeOfPart = cvGetSize(partImage);
    
    // Elbow position
    int transX1 = pProjective1[0].X;
    int transY1 = pProjective1[0].Y;
    // Shoulder position
    int transX2 = pProjective2[0].X;
    int transY2 = pProjective2[0].Y;
    // Midpoint between the two
    int transX3 = (transX1 + transX2) / 2;
    int transY3 = (transY1 + transY2) / 2;
    // Top-left corner for the overlay image
    int transX = transX3 - (sizeOfPart.width / 2);
    int transY = transY3 - (sizeOfPart.height / 2);
    // Angle of the upper arm
    float ang = cvFastArctan(transY2 - transY1, transX2 - transX1); //+ cvFastArctan(transY1, transX1);
    
    transform[0] = cvPoint2D32f( transX, transY);
    transform[1] = cvPoint2D32f( transX + mapMode.nXRes, transY);
    transform[2] = cvPoint2D32f( transX, transY + mapMode.nYRes);
    
    // Build the translation matrix
    CvMat *affineMatrix = cvCreateMat(2, 3, CV_32FC1);
    cvGetAffineTransform(original, transform, affineMatrix);
    
    // Translate
    cvWarpAffine(partImage, fallImage, affineMatrix, CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
    
    
    // Build the rotation matrix
    CvPoint2D32f center = cvPoint2D32f(transX3, transY3);
    IplImage *fallImage2 = cvCreateImage(cvSize(mapMode.nXRes, mapMode.nYRes), IPL_DEPTH_8U, 3); 
    CvMat *rotationMatrix = cvCreateMat(2, 3, CV_32FC1);
    cv2DRotationMatrix(center, 90.0 - ang, 1.0, rotationMatrix);
    // Rotate
    cvWarpAffine(fallImage, fallImage2, rotationMatrix, CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
    
    // Composite onto the RGB image
    fallPartImage(fallImage2, *rgbImage);
    
    // Release scratch matrices and images
    cvReleaseMat(&affineMatrix);
    cvReleaseMat(&rotationMatrix);
    cvReleaseImage(&fallImage);
    cvReleaseImage(&fallImage2);
}
Example #25
ReturnType Rotate::onExecute()
{
	// Fetch the image from the input port
	opros_any *pData = ImageIn.pop();
	RawImage result;

	if(pData != NULL){
		
		// Get the image delivered on the port
		RawImage Image = ImageIn.getContent(*pData);
		RawImageData *RawImage = Image.getImage();

		// Get the size of the current frame
		m_in_width = RawImage->getWidth();
		m_in_height = RawImage->getHeight();

		// Allocate the working images on first use
		if(m_orig_img == NULL){
			m_orig_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}
		if(m_trans_img == NULL){
			m_trans_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}
		if(m_result_img == NULL){
			m_result_img = cvCreateImage(cvSize(m_in_width, m_in_height), IPL_DEPTH_8U, 3);
		}

		// Copy the frame data in (memcpy)
		memcpy(m_orig_img->imageData, RawImage->getData(), RawImage->getSize());

		//************************* rotation
		// Set the rotation center
		CvPoint2D32f center = cvPoint2D32f( m_orig_img->width/2.0, m_orig_img->height/2.0);
		// Build the matrix for the requested angle
		cv2DRotationMatrix( center, m_angle, 1.0, m_rotate_mat );
		// cv2DRotationMatrix( center, 50.0, 1.0, m_rotate_mat );
		// Rotate the image
		cvWarpAffine( m_orig_img, m_result_img, m_rotate_mat, 
						CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

		// Get the RawImage data pointer of the result
		RawImageData *pimage = result.getImage();
		
		// Resize it to the output image size and channel count
		pimage->resize(m_result_img->width, m_result_img->height, m_result_img->nChannels);
		
		// Total data size (pixels x channels)
		int size = m_result_img->width * m_result_img->height * m_result_img->nChannels;
		
		// Destination pointer for the pixel values
		unsigned char *ptrdata = pimage->getData();
		
		// memcpy the current frame into the output buffer
		memcpy(ptrdata, m_result_img->imageData, size);

		// Push to the output port
		// opros_any mdata = result;
		ImageOut.push(result); // send

		delete pData;
	}

	return OPROS_SUCCESS;
}
Example #26
int main(int argc, char* argv[])
{
	// Set up variables
	CvPoint2D32f srcTri[3], dstTri[3];
	CvMat* rot_mat = cvCreateMat(2,3,CV_32FC1);
	CvMat* warp_mat = cvCreateMat(2,3,CV_32FC1);
	IplImage *src, *dst;
	const char* name = "Affine_Transform";

	// Load image
	src = cvLoadImage("airplane.jpg");
	if( !src )
		return -1;
	dst = cvCloneImage( src );
	dst->origin = src->origin;
	cvZero( dst );
	cvNamedWindow( name, 1 );

	// Create angle and scale
	double angle = 0.0;
	double scale = 1.0;

	// Create trackbars
	cvCreateTrackbar( "Angle", name, &angle_switch_value, 4, switch_callback_a );
	cvCreateTrackbar( "Scale", name, &scale_switch_value, 4, switch_callback_s );

	// Compute warp matrix
	srcTri[0].x = 0;
	srcTri[0].y = 0;
	srcTri[1].x = src->width - 1;
	srcTri[1].y = 0;
	srcTri[2].x = 0;
	srcTri[2].y = src->height - 1;

	dstTri[0].x = src->width*0.0;
	dstTri[0].y = src->height*0.25;
	dstTri[1].x = src->width*0.90;
	dstTri[1].y = src->height*0.15;
	dstTri[2].x = src->width*0.10;
	dstTri[2].y = src->height*0.75;

	cvGetAffineTransform( srcTri, dstTri, warp_mat );
	cvWarpAffine( src, dst, warp_mat );
	cvCopy ( dst, src );

	while( 1 ) {
		switch( angleInt ){
			case 0:
				angle = 0.0;
				break;
			case 1:
				angle = 20.0;
				break;
			case 2:
				angle = 40.0;
				break;
			case 3:
				angle = 60.0;
				break;
			case 4:
				angle = 90.0;
				break;
		}
		switch( scaleInt ){
			case 0:
				scale = 1.0;
				break;
			case 1:
				scale = 0.8;
				break;
			case 2:
				scale = 0.6;
				break;
			case 3:
				scale = 0.4;
				break;
			case 4:
				scale = 0.2;
				break;
		}

		// Compute rotation matrix
		CvPoint2D32f center = cvPoint2D32f( src->width/2, src->height/2 );
		cv2DRotationMatrix( center, angle, scale, rot_mat );

		// Do the transformation
		cvWarpAffine( src, dst, rot_mat );

		cvShowImage( name, dst );

		if( cvWaitKey( 15 ) == 27 )
			break;
	}

	cvReleaseImage( &src );
	cvReleaseImage( &dst );
	cvReleaseMat( &rot_mat );
	cvReleaseMat( &warp_mat );

	return 0;
}