Example #1
IplImage * MyRotate::doRotate( IplImage *src, double angle )
{
	if (src == NULL)
	{
		return NULL;
	}
	// Allocate the destination with the same size, depth and channel count as the source.
	IplImage *dst = cvCreateImage(cvGetSize(src), src->depth, src->nChannels);

	float m[6];
	// Matrix m looks like:
	//
	// [ m0  m1  m2 ] ===>  [ A11  A12   b1 ]
	// [ m3  m4  m5 ]       [ A21  A22   b2 ]
	//
	CvMat M = cvMat (2, 3, CV_32F, m);
	int w = src->width;
	int h = src->height;

	m[0] = (float) ( cos (-angle * CV_PI / 180.));
	m[1] = (float) ( sin (-angle * CV_PI / 180.));
	m[3] = -m[1];
	m[4] = m[0];
	// move rotate center to image center
	m[2] = w * 0.5f;
	m[5] = h * 0.5f;
	//  dst(x,y) = A * src(x,y) + b
	cvZero (dst);
	cvGetQuadrangleSubPix (src, dst, &M);

	return dst;
}
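A minimal caller sketch for doRotate above (not part of the original source; "input.jpg" and a default-constructible MyRotate are assumptions). The returned image is a fresh allocation, so the caller must release it:

// Hypothetical usage of MyRotate::doRotate, a sketch assuming the legacy OpenCV C API headers.
#include <opencv/cv.h>
#include <opencv/highgui.h>

void demoDoRotate()
{
	IplImage *src = cvLoadImage("input.jpg", 1);   // assumed input file, loaded as 3-channel
	if (src == NULL)
		return;

	MyRotate rotator;                              // assumes MyRotate is default-constructible
	IplImage *dst = rotator.doRotate(src, 30.0);   // rotate by 30 degrees

	if (dst != NULL)
	{
		cvSaveImage("rotated.jpg", dst);
		cvReleaseImage(&dst);                      // doRotate allocates a new image; the caller frees it
	}
	cvReleaseImage(&src);
}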
Example #2
IplImage* RotateImage(const IplImage* pSource, double dAngle)
{
	if (pSource == NULL)
	{
		printf("RotateImage input image is NULL!\n");
		return NULL;
	}
	
	if (dAngle == 0)
	{
		return cvCloneImage(pSource);
	}
	
	CvSize iSize = cvGetSize(pSource);
	double dCos = cos(dAngle * CV_PI / 180.0);
	double dSin = sin(dAngle * CV_PI / 180.0);
	
	CvMat* pRotateMat = cvCreateMat(2, 3, CV_32FC1);
	cvmSet(pRotateMat, 0, 0, dCos);
	cvmSet(pRotateMat, 0, 1, dSin);
	cvmSet(pRotateMat, 0, 2, iSize.width / 2);
	cvmSet(pRotateMat, 1, 0, -dSin);
	cvmSet(pRotateMat, 1, 1, dCos);
	cvmSet(pRotateMat, 1, 2, iSize.height / 2);
	
	IplImage* pRotatedImage = cvCloneImage(pSource);
	cvGetQuadrangleSubPix(pSource, pRotatedImage, pRotateMat);
	
	cvReleaseMat(&pRotateMat);
	
	return pRotatedImage;
}
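For comparison, a sketch of the same rotation built with cv2DRotationMatrix and cvWarpAffine from the same legacy C API. Note that cvWarpAffine applies the matrix as a forward src-to-dst transform (unless CV_WARP_INVERSE_MAP is passed), while cvGetQuadrangleSubPix uses the matrix to sample the source, so the two conventions are not interchangeable:

// Alternative rotation sketch using cv2DRotationMatrix + cvWarpAffine.
// Assumptions: legacy OpenCV C API; positive dAngle rotates counter-clockwise in OpenCV's convention.
IplImage* RotateImageWarp(const IplImage* pSource, double dAngle)
{
	if (pSource == NULL)
	{
		return NULL;
	}

	CvSize iSize = cvGetSize(pSource);
	CvPoint2D32f iCenter = cvPoint2D32f(iSize.width * 0.5, iSize.height * 0.5);

	CvMat* pMap = cvCreateMat(2, 3, CV_32FC1);
	cv2DRotationMatrix(iCenter, dAngle, 1.0, pMap);   // scale factor 1.0

	IplImage* pRotated = cvCloneImage(pSource);
	cvWarpAffine(pSource, pRotated, pMap,
				 CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));

	cvReleaseMat(&pMap);
	return pRotated;
}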
Example #3
File: opencv.c  Project: I-am-Gabi/opencv
// Rotate the image clockwise (or counter-clockwise if the angle is negative).
// Remember to free the returned image.
IplImage* rotateImage1(const IplImage* img, float angleDegrees)
{
	// Create a map_matrix, where the left 2x2 matrix
	// is the transform and the right 2x1 is the dimensions.
	float m[6];
	CvMat M = cvMat(2, 3, CV_32F, m);
	int width = img->width;
	int height = img->height;
	float angleRadians = angleDegrees * ((float)CV_PI / 180.0f);
	m[0] = (float)( cos(angleRadians) );
	m[1] = (float)( sin(angleRadians) );
	m[3] = -m[1];
	m[4] = m[0];
	m[2] = width * 0.5f;  
	m[5] = height * 0.5f;  

	// Make a spare image for the result
	CvSize sizeRotated;
	sizeRotated.width = cvRound(width);
	sizeRotated.height = cvRound(height);

	// Rotate
	IplImage* imageRotated = cvCreateImage( 
		sizeRotated,
		img->depth, 
		img->nChannels 
	);

	// Transform the image
	cvGetQuadrangleSubPix(img, imageRotated, &M);

	return imageRotated;
}
Example #4
void mcvGetQuadrangleSubPix(IplImage * src, IplImage * dest, affinity * A,
                            int /*fill_outliers*/, CvScalar /*fill_value*/)
{
  // Temporarily expose only the top two rows so cvGetQuadrangleSubPix
  // receives the 2x3 map matrix it expects.
  A->rows = 2;
  cvGetQuadrangleSubPix(src, dest, A);
  A->rows = 3;
}
Example #5
File: EVUtil.cpp  Project: douzsh/douzsh
void EVUtil::RotateImage( IplImage* pSrc,IplImage* pDst,double dAngle )
{
	if ( !pSrc || !pDst )
	{
		return;
	}
	float m[6];
	// Matrix m looks like:
	//
	// [ m0  m1  m2 ] ===>  [ A11  A12   b1 ]
	// [ m3  m4  m5 ]       [ A21  A22   b2 ]
	//
	CvMat M = cvMat (2, 3, CV_32F, m);
	int w = (pSrc)->width;
	int h = (pSrc)->height;
	m[0] = (float) (cos (-dAngle * CV_PI / 180.));
	m[1] = (float) (sin (-dAngle * CV_PI / 180.));
	m[3] = -m[1];
	m[4] = m[0];
	// move the rotation center to the image center
	m[2] = w * 0.5f;
	m[5] = h * 0.5f;
	//  dst(x,y) = A * src(x,y) + b
	cvZero (pDst);
	cvGetQuadrangleSubPix (pSrc, pDst, &M);
	return;
}
Example #6
void EyeExtractor::extractLeftEye(const IplImage *origimage, double x1, double y1, double x0, double y0) 
    throw (TrackingException) 
{
    //if (!tracker.status[tracker.eyepoint1])
	//throw TrackingException();

    double factor = 0.17;
    double xfactor = 0.05;
    double yfactor = 0.20 * (x0 < x1 ? -1 : 1);
    double L = factor / eyedx;
    double LL = x0 < x1? L : -L;
    float matrix[6] = 
	{LL*(x1-x0), LL*(y0-y1), 
	 x0 + factor * ((1-xfactor)*(x1-x0) + yfactor * (y0-y1)),
	 LL*(y1-y0), LL*(x1-x0), 
	 y0 + factor * ((1-xfactor)*(y1-y0) + yfactor * (x1-x0))};
    CvMat M = cvMat( 2, 3, CV_32F, matrix );

	float matrix2[6] = 
	{LL*(x1-x0), LL*(y0-y1), 
	 x0 + 2*64 + factor * ((1-xfactor)*(x1-x0) + yfactor * (y0-y1)),
	 LL*(y1-y0), LL*(x1-x0), 
	 y0 + factor * ((1-xfactor)*(y1-y0) + yfactor * (x1-x0))};
    
    cvGetQuadrangleSubPix( origimage, eyeimage_left.get(), &M);
    cvCvtColor(eyeimage_left.get(), eyegrey_left.get(), CV_RGB2GRAY);
}
Example #7
void COpenCVMFCView::OnRotation30()
{
	// TODO: Add your command handler code here

	int angle = 30;                         //  Rotate 30 degree
	int opt = 0;                            //  1: with resize   0: just rotate
	double factor;                          //  resize factor
	IplImage *pImage;
	IplImage *pImgRotation = NULL;

	pImage = workImg;
	pImgRotation = cvCloneImage(workImg);

	angle = -angle;

	//  Create M Matrix
	float m[6];
	//      Matrix m looks like:
	//      [ m0  m1  m2 ] ----> [ a11  a12  b1 ]
	//      [ m3  m4  m5 ] ----> [ a21  a22  b2 ]

	CvMat M = cvMat(2,3,CV_32F,m);
	int w = workImg->width;
	int h = workImg->height;

	if (opt)
		factor = (cos(angle*CV_PI/180.)+1.0)*2;
	else 
		factor = 1;

	m[0] = (float)(factor*cos(-angle*CV_PI/180.));
	m[1] = (float)(factor*sin(-angle*CV_PI/180.));
	m[3] = -m[1];
	m[4] =  m[0];
	//  Move the rotation center to the image center
	m[2] = w*0.5f;
	m[5] = h*0.5f;

	//---------------------------------------------------------
	//  dst(x,y) = A * src(x,y) + b
	cvZero(pImgRotation);
	cvGetQuadrangleSubPix(pImage,pImgRotation,&M);
	//---------------------------------------------------------

	cvNamedWindow("Rotation Image");
	cvFlip(pImgRotation);
	cvShowImage("Rotation Image",pImgRotation);

	cvReleaseImage(&pImgRotation);

	cvWaitKey(0);

	cvDestroyWindow("Rotation Image");
}
Example #8
// Rotate src by angle degrees around the point center and scale it by factor.
void RotateImage(IplImage *src, IplImage *dst, CvPoint center, float angle,
		float factor) {
	float m[6];
	CvMat mat = cvMat(2, 3, CV_32FC1, m);
	m[0] = (float) (factor * cos(-angle * CV_PI / 180.));
	m[1] = (float) (factor * sin(-angle * CV_PI / 180.));
	m[2] = center.x;
	m[3] = -m[1];
	m[4] = m[0];
	m[5] = center.y;
	cvSetZero(dst);
	cvGetQuadrangleSubPix(src, dst, &mat);
}
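A hypothetical call site for the helper above. It assumes dst has already been allocated with the same size, depth and channel count as src, since cvGetQuadrangleSubPix fills the whole destination; "input.jpg" is an assumed file name:

// Usage sketch: rotate 45 degrees about the image center with factor 0.5.
IplImage *src = cvLoadImage("input.jpg", 1);
if (src != NULL) {
	IplImage *dst = cvCreateImage(cvGetSize(src), src->depth, src->nChannels);
	CvPoint center = cvPoint(src->width / 2, src->height / 2);

	RotateImage(src, dst, center, 45.0f, 0.5f);   // angle in degrees, factor scales the sampling matrix

	cvSaveImage("rotated_scaled.jpg", dst);
	cvReleaseImage(&dst);
	cvReleaseImage(&src);
}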
Example #9
typename image<T, D>::create_new get_quadrange_sub_pix(const image<T, D>& a,
		const array_2d<double>& map_matrix, const size_array& dst_size)
{
	IplImage* src = a.ipl();
	IplImage* dst;

	if(dst_size[0] == 0 && dst_size[1] == 0) {
		dst = cvCreateImage(cvGetSize(src),
		image_details::ipl_depth<T>(), int(a.channels()));
	} else {
		dst = cvCreateImage(cvSize(dst_size[1], dst_size[0]),
		image_details::ipl_depth<T>(), int(a.channels()));
	}

	CHECK(map_matrix.rows() == 2, eshape);
	CHECK(map_matrix.columns() == 3, eshape);

	CvMat* cvmap_matrix = cvCreateMat(2, 3, image_details::cv_type<double>());
	cvmSet(cvmap_matrix, 0, 0, map_matrix(0, 0));
	cvmSet(cvmap_matrix, 0, 1, map_matrix(0, 1));
	cvmSet(cvmap_matrix, 0, 2, map_matrix(0, 2));
	cvmSet(cvmap_matrix, 1, 0, map_matrix(1, 0));
	cvmSet(cvmap_matrix, 1, 1, map_matrix(1, 1));
	cvmSet(cvmap_matrix, 1, 2, map_matrix(1, 2));

	/*cvmap_matrix->data.db[0] = map_matrix(0, 0);
	cvmap_matrix->data.db[1] = map_matrix(0, 1);
	cvmap_matrix->data.db[2] = map_matrix(0, 2);
	cvmap_matrix->data.db[3] = map_matrix(1, 0);
	cvmap_matrix->data.db[4] = map_matrix(1, 1);
	cvmap_matrix->data.db[5] = map_matrix(1, 2);*/

	cvGetQuadrangleSubPix(src, dst, cvmap_matrix);

	typename image<T, D>::create_new r(dst);

	cvReleaseMat(&cvmap_matrix);
	cvReleaseImage(&src);
	cvReleaseImage(&dst);
	return r;
}
Example #10
int main( int argc, char** argv )
{
    IplImage* src = cvLoadImage("imag.jpg", 1);    
    IplImage* Welcome = cvCloneImage( src );

    int delta = 1;
    int angle = 0;
    int opt;
	printf("\nEnter 1 to Zoom & Rotate 0 to rotate only\n");
	scanf("%d",&opt);  						// 1: rotate & zoom
               						// 0:  rotate only
    double factor;

    for(;;)
    {
        float m[6];
        CvMat M = cvMat(2, 3, CV_32F, m);
        int w = src->width;
        int h = src->height;

        if(opt)
            factor = (cos(angle*CV_PI/180.) + 1.05) * 2;
        else
            factor = 1;
        m[0] = (float)(factor*cos(-angle*2*CV_PI/180.));
        m[1] = (float)(factor*sin(-angle*2*CV_PI/180.));
        m[3] = -m[1];
        m[4] = m[0];
        m[2] = w*0.5f;
        m[5] = h*0.5f;

        cvGetQuadrangleSubPix( src, Welcome, &M);
        cvNamedWindow("Welcome", 1);
        cvShowImage("Welcome", Welcome);
        if( cvWaitKey(1) == 27 )
            break;
        angle = (angle + delta) % 360;
    }
    return 0;
}
Example #11
void CameraSimulator::ProcessImage() {
	// Copy the section of the image into the published image
	float angle = th;
	// little rotation matrix
	m[0] = (float) (cos(-angle));
	m[1] = (float) (sin(-angle));
	m[3] = -m[1];
	m[4] = m[0];
	// and the translation (centre point from original image)
	m[2] = image.width() * 0.5f + x * 100.0;
	m[5] = image.height() * 0.5f + y * 100.0;

	cvGetQuadrangleSubPix(image, (*imageOutput), &M);

	int pt = 0;
	for (int y = 0; y < imageOutput->height(); y++) {
		for (int x = 0; x < imageOutput->width(); x++) {
			data.image[pt] = ((*imageOutput)[y][x]).r;
			pt++;
			data.image[pt] = (((*imageOutput)[y][x])).g;
			pt++;
			data.image[pt] = (((*imageOutput)[y][x])).b;
			pt++;
		}
	}
	//	  std::cout << "Camera sim almost ready...\n";
	this->data.image_count = (imageOutput->width()) * (imageOutput->height())
			* 3;
	this->data.width = imageOutput->width();
	this->data.height = imageOutput->height();
	this->data.bpp = 24;
	this->data.format = PLAYER_CAMERA_FORMAT_RGB888;
	this->data.compression = PLAYER_CAMERA_COMPRESS_RAW;

	// Process the unwarped image into the output space
	Publish(device_addr, PLAYER_MSGTYPE_DATA, PLAYER_CAMERA_DATA_STATE, (void*) &this->data, 0, &this->camera_time);
}
Example #12
int MatchWithAngle(const IplImage* pTemplate,
				   const IplImage* pSource,
				   double dAngle,
				   double dStep,
				   ShiftValue& iShift)
{
	// check input params
	if (pTemplate == NULL || pSource == NULL)
	{
		printf("MatchWithAngle input image is NULL!\n");
		return -1;
	}
	if (dAngle < 0 || dStep < 0 || dStep > dAngle)
	{
		printf("MatchWithAngle input angle and step are invalid!\n");
		return -1;
	}
	CvSize iTemplateSize = cvGetSize(pTemplate);
	CvSize iSourceSize = cvGetSize(pSource);
	if (iTemplateSize.width > iSourceSize.width ||
		iTemplateSize.height > iSourceSize.height)
	{
		printf("MatchWithAngle input image size is error!\n");
		return -1;
	}
	// check input params ends
	
	IplImage* pRotateImage = cvCloneImage(pSource);
	// rotation matrix used by cvGetQuadrangleSubPix()
	CvMat* pRotateMat = cvCreateMat(2, 3, CV_32FC1);
	
	// cvMatchTemplate result mat
	CvMat* pMatchedResult 
			= cvCreateMat(iSourceSize.height - iTemplateSize.height + 1,
						  iSourceSize.width - iTemplateSize.width + 1,
					  	  CV_32FC1);
									 
	CvPoint iMinPos, iMaxPos;
	double dMinValue, dMaxValue;
	double dCmpValue = -1;
	// get the best matching result in this loop
	for (double dA = -dAngle; dA < dAngle + dStep; dA += dStep)
	{
		// make sure the angle is not larger than dAngle
		if (dA > dAngle)
		{
			dA = dAngle;
		}
		
		// get the rotate mat
		double dCos = cos(dA * CV_PI / 180.0);
		double dSin = sin(dA * CV_PI / 180.0);
		cvmSet(pRotateMat, 0, 0, dCos);
		cvmSet(pRotateMat, 0, 1, dSin);
		cvmSet(pRotateMat, 0, 2, iSourceSize.width / 2);
		cvmSet(pRotateMat, 1, 0, -dSin);
		cvmSet(pRotateMat, 1, 1, dCos);
		cvmSet(pRotateMat, 1, 2, iSourceSize.height / 2);
		
		// rotate the source image by angle dA
		cvGetQuadrangleSubPix(pSource, pRotateImage, pRotateMat);
		
		// get the best match
		cvMatchTemplate(pRotateImage,
						pTemplate,
						pMatchedResult,
						CV_TM_CCOEFF_NORMED);
		cvMinMaxLoc(pMatchedResult,
					&dMinValue,
					&dMaxValue,
					&iMinPos,
					&iMaxPos,
					NULL);
		
		if (dCmpValue < dMaxValue)
		{
			dCmpValue = dMaxValue;
			iShift.dX = iMaxPos.x;
			iShift.dY = iMaxPos.y;
			iShift.dAngle = dA;
		}
		
		/*
		if (dCmpValue > dMinValue)
		{
			dCmpValue = dMinValue;
			iShift.dX = iMinPos.x;
			iShift.dY = iMinPos.y;
			iShift.dAngle = dA;
		}
		*/
		
	}
	
	iShift.dX += iTemplateSize.width / 2 - iSourceSize.width / 2;
	iShift.dY += iTemplateSize.height / 2 - iSourceSize.height / 2;
	
	cvReleaseImage(&pRotateImage);
	cvReleaseMat(&pRotateMat);
	cvReleaseMat(&pMatchedResult);
	
	return 1;
}
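The ShiftValue type used by MatchWithAngle is not part of the listing; below is an assumed definition matching the fields referenced above, together with a hypothetical call ("template.jpg" and "scene.jpg" are placeholder file names):

// Assumed shape of ShiftValue, inferred from the fields MatchWithAngle writes.
struct ShiftValue
{
	double dX;       // x offset of the best match
	double dY;       // y offset of the best match
	double dAngle;   // rotation in degrees that gave the best score
};

// Hypothetical call: search within +/-5 degrees in 1-degree steps.
void demoMatchWithAngle()
{
	IplImage* pTemplate = cvLoadImage("template.jpg", 0);   // grayscale
	IplImage* pSource = cvLoadImage("scene.jpg", 0);
	if (pTemplate != NULL && pSource != NULL)
	{
		ShiftValue iShift;
		if (MatchWithAngle(pTemplate, pSource, 5.0, 1.0, iShift) == 1)
		{
			printf("dx=%.1f dy=%.1f angle=%.1f\n", iShift.dX, iShift.dY, iShift.dAngle);
		}
	}
	cvReleaseImage(&pTemplate);
	cvReleaseImage(&pSource);
}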
Example #13
IplImage* cvTestSeqQueryFrame(CvTestSeq* pTestSeq)
{
    CvTestSeq_*     pTS = (CvTestSeq_*)pTestSeq;
    CvTestSeqElem*  p = pTS->pElemList;
    IplImage*       pImg = pTS->pImg;
    IplImage*       pImgAdd = cvCloneImage(pTS->pImg);
    IplImage*       pImgAddG = cvCreateImage(cvSize(pImgAdd->width,pImgAdd->height),IPL_DEPTH_8U,1);
    IplImage*       pImgMask = pTS->pImgMask;
    IplImage*       pImgMaskAdd = cvCloneImage(pTS->pImgMask);
    CvMat*          pT = cvCreateMat(2,3,CV_32F);

    if(pTS->CurFrame >= pTS->FrameNum) return NULL;
    cvZero(pImg);
    cvZero(pImgMask);

    for(p=pTS->pElemList; p; p=p->next)
    {
        int             DirectCopy = FALSE;
        int             frame = pTS->CurFrame - p->FrameBegin;
        //float           t = p->FrameNum>1?((float)frame/(p->FrameNum-1)):0;
        CvTSTrans*      pTrans = p->pTrans + frame%p->TransNum;

        assert(pTrans);

        if( p->FrameNum > 0 && (frame < 0 || frame >= p->FrameNum) )
        {   /* Current frame is out of range: */
            //if(p->pAVI)cvReleaseCapture(&p->pAVI);
            p->pAVI = NULL;
            continue;
        }

        cvZero(pImgAdd);
        cvZero(pImgAddG);
        cvZero(pImgMaskAdd);

        if(p->noise_type == CV_NOISE_NONE)
        {   /* For not noise:  */
            /* Get next frame: */
            icvTestSeqQureyFrameElem(p, frame);
            if(p->pImg == NULL) continue;

#if 1 /* transform using T field in Trans */
            {   /* Calculate transform matrix: */
                float   W = (float)(pImgAdd->width-1);
                float   H = (float)(pImgAdd->height-1);
                float   W0 = (float)(p->pImg->width-1);
                float   H0 = (float)(p->pImg->height-1);
                cvZero(pT);
                {   /* Calculate inverse matrix: */
                    CvMat   mat = cvMat(2,3,CV_32F, pTrans->T);
                    mat.width--;
                    pT->width--;
                    cvInvert(&mat, pT);
                    pT->width++;
                }

                CV_MAT_ELEM(pT[0], float, 0, 2) =
                    CV_MAT_ELEM(pT[0], float, 0, 0)*(W0/2-pTrans->T[2])+
                    CV_MAT_ELEM(pT[0], float, 0, 1)*(H0/2-pTrans->T[5]);

                CV_MAT_ELEM(pT[0], float, 1, 2) =
                    CV_MAT_ELEM(pT[0], float, 1, 0)*(W0/2-pTrans->T[2])+
                    CV_MAT_ELEM(pT[0], float, 1, 1)*(H0/2-pTrans->T[5]);

                CV_MAT_ELEM(pT[0], float, 0, 0) *= W0/W;
                CV_MAT_ELEM(pT[0], float, 0, 1) *= H0/H;
                CV_MAT_ELEM(pT[0], float, 1, 0) *= W0/W;
                CV_MAT_ELEM(pT[0], float, 1, 1) *= H0/H;

            }   /* Calculate transform matrix. */
#else
            {   /* Calculate transform matrix: */
                float   SX = (float)(p->pImg->width-1)/((pImgAdd->width-1)*pTrans->Scale.x);
                float   SY = (float)(p->pImg->height-1)/((pImgAdd->height-1)*pTrans->Scale.y);
                float   DX = pTrans->Shift.x;
                float   DY = pTrans->Shift.y;
                cvZero(pT);
                ((float*)(pT->data.ptr+pT->step*0))[0]=SX;
                ((float*)(pT->data.ptr+pT->step*1))[1]=SY;
                ((float*)(pT->data.ptr+pT->step*0))[2]=SX*(pImgAdd->width-1)*(0.5f-DX);
                ((float*)(pT->data.ptr+pT->step*1))[2]=SY*(pImgAdd->height-1)*(0.5f-DY);
            }   /* Calculate transform matrix. */
#endif


            {   /* Check for direct copy: */
                DirectCopy = TRUE;
                if( fabs(CV_MAT_ELEM(pT[0],float,0,0)-1) > 0.00001) DirectCopy = FALSE;
                if( fabs(CV_MAT_ELEM(pT[0],float,1,0)) > 0.00001) DirectCopy = FALSE;
                if( fabs(CV_MAT_ELEM(pT[0],float,0,1)) > 0.00001) DirectCopy = FALSE;
                if( fabs(CV_MAT_ELEM(pT[0],float,1,1)-1) > 0.00001) DirectCopy = FALSE;
                if( fabs(CV_MAT_ELEM(pT[0],float,0,2)-(pImg->width-1)*0.5) > 0.5) DirectCopy = FALSE;
                if( fabs(CV_MAT_ELEM(pT[0],float,1,2)-(pImg->height-1)*0.5) > 0.5) DirectCopy = FALSE;
            }

            /* Extract image and mask: */
            if(p->pImg->nChannels == 1)
            {
                if(DirectCopy)
                {
                    cvCvtColor( p->pImg,pImgAdd,CV_GRAY2BGR);
                }
                else
                {
                    cvGetQuadrangleSubPix( p->pImg, pImgAddG, pT);
                    cvCvtColor( pImgAddG,pImgAdd,CV_GRAY2BGR);
                }
            }

            if(p->pImg->nChannels == 3)
            {
                if(DirectCopy)
                    cvCopy(p->pImg, pImgAdd);
                else
                    cvGetQuadrangleSubPix( p->pImg, pImgAdd, pT);
            }

            if(p->pImgMask)
            {
                if(DirectCopy)
                    cvCopy(p->pImgMask, pImgMaskAdd);
                else
                    cvGetQuadrangleSubPix( p->pImgMask, pImgMaskAdd, pT);

                cvThreshold(pImgMaskAdd,pImgMaskAdd,128,255,CV_THRESH_BINARY);
            }

            if(pTrans->C != 1 || pTrans->I != 0)
            {   /* Intensity transformation: */
                cvScale(pImgAdd, pImgAdd, pTrans->C,pTrans->I);
            }   /* Intensity transformation: */

            if(pTrans->GN > 0)
            {   /* Add noise: */
                IplImage* pImgN = cvCloneImage(pImgAdd);
                cvRandSetRange( &p->rnd_state, pTrans->GN, 0, -1 );
                cvRand(&p->rnd_state, pImgN);
                cvAdd(pImgN,pImgAdd,pImgAdd);
                cvReleaseImage(&pImgN);
            }   /* Add noise. */

            if(p->Mask)
            {   /* Update only mask: */
                cvOr(pImgMaskAdd, pImgMask, pImgMask);
            }
            else
            {   /* Add image and mask to exist main image and mask: */
                if(p->BG)
                {   /* If image is background: */
                    cvCopy( pImgAdd, pImg, NULL);
                }
                else
                {   /* If image is foreground: */
                    cvCopy( pImgAdd, pImg, pImgMaskAdd);
                    if(p->ObjID>=0)
                        cvOr(pImgMaskAdd, pImgMask, pImgMask);
                }
            }   /* Not mask. */
        }   /*  For not noise. */
        else
        {   /* Process noise video: */

            if( p->noise_type == CV_NOISE_GAUSSIAN ||
Example #14
void EyeExtractor::extractEye(const IplImage *origimage) 
    throw (TrackingException) 
{
    static int frame_no = 1;
    string file;
    char buffer [100];

    //if (!tracker.status[tracker.eyepoint1])
	//throw TrackingException();



    double x0 = tracker.currentpoints[tracker.eyepoint1].x;
    double y0 = tracker.currentpoints[tracker.eyepoint1].y;
    double x1 = tracker.currentpoints[tracker.eyepoint2].x;
    double y1 = tracker.currentpoints[tracker.eyepoint2].y;

    double dh = sqrt(pow((x1-x0),2) + pow((y1-y0),2)); 
    double dx = x1-x0;
    double dy = y1-y0;
    
    double alpha = atan2(dy, dx);// * 180 / PI;

    x0 -= dh/30*sin(alpha);
    y0 += dh/30*cos(alpha);
    x1 -= dh/30*sin(alpha);
    y1 += dh/30*cos(alpha);

    double factor = 0.17;
    double xfactor = 0.05;
    double yfactor = 0.20 * (x0 < x1 ? -1 : 1);
    double L = factor / eyedx;
    double LL = x0 < x1? L : -L;
    float matrix[6] = 
	{LL*(x1-x0), LL*(y0-y1), 
	 x0 + factor * ((1-xfactor)*(x1-x0) + yfactor * (y0-y1)),
	 LL*(y1-y0), LL*(x1-x0), 
	 y0 + factor * ((1-xfactor)*(y1-y0) + yfactor * (x1-x0))};
    CvMat M = cvMat( 2, 3, CV_32F, matrix );

    cvGetQuadrangleSubPix( origimage, eyeimage.get(), &M);
    cvCvtColor(eyeimage.get(), eyegrey.get(), CV_RGB2GRAY);

/*
// ------------------ Arcadi -------------------

    //if (saveImage == true){
        cout << "SAVING IMAGES" << endl;
	    file=sprintf (buffer, "../Images/Colour/Eye_Image_%d.jpg", frame_no);

	    cvSaveImage(buffer, eyeimage.get());

	    file=sprintf (buffer, "../Images/Grey/Eye_Image_%d.jpg", frame_no);

	    cvSaveImage(buffer, eyegrey.get());

        frame_no++;
	//}

// ---------------------------------------------
*/
	extractLeftEye(origimage, x0, y0, x1, y1);
	
    processEye();
}