Example #1
0
void margBlobInterpolator :: makeInterpolated(vector<margBlob> current) {

	// First frame ever seen: just remember the incoming blobs and stop.
	if (prevBlobs.empty()) {
		prevBlobs = current;
		return;
	}

	const int blobCount = (int) current.size();
	vector<margBlob> incoming = current;	// untouched copy of this frame's blobs
	current.clear();						// 'current' now acts as the output buffer

	// For each incoming blob, findPairs() yields the index of its partner in
	// prevBlobs, or a negative value when no partner was found.
	int* match = findPairs(incoming);
	for (int b = 0; b < blobCount; b++) {
		if (match[b] < 0) {
			// No previous partner: pass the blob through untouched.
			current.push_back(incoming[b]);
		}
		else if (prevBlobs[match[b]].pts.size() > 0) {
			// Paired with a non-empty previous blob: emit the interpolation steps.
			vector<margBlob> steps = interpolate(prevBlobs[match[b]], incoming[b]);
			current.insert(current.end(), steps.begin(), steps.end());
		}
		// Paired with an empty previous blob: deliberately emit nothing,
		// same as the original behavior.
	}

	// This frame's raw blobs become the reference for the next call.
	prevBlobs = incoming;
	finalBlobs = current;
	delete[] match;		// delete[] on null is a no-op, so no guard needed
}
/* a rough implementation for object location.
   Matches object descriptors against image descriptors, fits a homography
   over the matched point pairs, and projects src_corners into dst_corners.
   Returns 1 on success, 0 when there are too few pairs or the fit fails. */
int
locatePlanarObject( const CvSeq* objectKeypoints, const CvSeq* objectDescriptors,
                    const CvSeq* imageKeypoints, const CvSeq* imageDescriptors,
                    const CvPoint src_corners[4], CvPoint dst_corners[4] )
{

    double h[9];
    CvMat _h = cvMat(3, 3, CV_64F, h);
    // vector<int> ptpairs;
    // vector<CvPoint2D32f> pt1, pt2;
    struct pairList * ptpairs = 0;
    CvMat _pt1, _pt2;
    int i, n;

    findPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );

    /* BUGFIX: findPairs() receives 'ptpairs' by value, so it cannot update this
       local pointer; without this guard the line below dereferenced NULL.
       NOTE(review): if findPairs() is meant to allocate the list, the call
       probably needs to pass &ptpairs — confirm against its declaration. */
    if( ptpairs == 0 )
        return 0;

    n = (int)(ptpairs->currentItems);
    if( n < 4 )
        return 0;

    struct pair * pairA = (struct pair *) malloc(sizeof(struct pair) * n );
    struct pair * pairB = (struct pair *) malloc(sizeof(struct pair) * n );
    if( pairA == 0 || pairB == 0 )
    {
        /* free(NULL) is a no-op, so both calls are safe */
        free(pairA);
        free(pairB);
        return 0;
    }

    /* NOTE(review): the fill loop is entirely commented out, so pairA/pairB
       hold uninitialized malloc'd memory when handed to cvFindHomography —
       this code path looks unfinished; confirm before relying on it. */
    for( i = 0; i < n; i+=2 )
    {
       //   pairA[i].p1 = ptpairs[i].p1; //  ((CvSURFPoint*)cvGetSeqElem(objectKeypoints,ptpairs[i*2]))->pt;
       //   pairA[i].p2 = ptpairs[i].p2; //  ((CvSURFPoint*)cvGetSeqElem(objectKeypoints,ptpairs[i*2]))->pt;

       //   pairB[i].p1 = ptpairs[i+1].p1; //  ((CvSURFPoint*)cvGetSeqElem(imageKeypoints,ptpairs[i*2+1]))->pt;
       //   pairB[i].p2 = ptpairs[i+1].p2; //  ((CvSURFPoint*)cvGetSeqElem(imageKeypoints,ptpairs[i*2+1]))->pt;
    }

    _pt1 = cvMat(1, n, CV_32FC2, pairA );
    _pt2 = cvMat(1, n, CV_32FC2, pairB );
    if( !cvFindHomography( &_pt1, &_pt2, &_h, CV_RANSAC, 5 , 0 ))
    {
        /* BUGFIX: pairA/pairB were leaked on this early return */
        free(pairA);
        free(pairB);
        return 0;
    }

    /* Apply the homography h to the four source corners:
       (X, Y) = (h00 x + h01 y + h02, h10 x + h11 y + h12) / (h20 x + h21 y + h22) */
    for( i = 0; i < 4; i++ )
    {
        double x = src_corners[i].x, y = src_corners[i].y;
        double Z = 1./(h[6]*x + h[7]*y + h[8]);
        double X = (h[0]*x + h[1]*y + h[2])*Z;
        double Y = (h[3]*x + h[4]*y + h[5])*Z;
        dst_corners[i] = cvPoint(cvRound(X), cvRound(Y));
    }

    /* BUGFIX: pairA/pairB were never freed on the success path either */
    free(pairA);
    free(pairB);
    return 1;
}
/* a rough implementation for object location */
int
locatePlanarObject( const CvSeq* objectKeypoints, const CvSeq* objectDescriptors,
                    const CvSeq* imageKeypoints, const CvSeq* imageDescriptors,
                    const CvPoint src_corners[4], CvPoint dst_corners[4] )
{
    double h[9];
    CvMat _h = cvMat(3, 3, CV_64F, h);
    vector<int> ptpairs;
    vector<CvPoint2D32f> pt1, pt2;
    CvMat _pt1, _pt2;
    int i, n;

#ifdef USE_FLANN
    flannFindPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );
#else
    findPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );
#endif

    n = ptpairs.size()/2;
    if( n < 4 )
        return 0;

    pt1.resize(n);
    pt2.resize(n);
    for( i = 0; i < n; i++ )
    {
        pt1[i] = ((CvSURFPoint*)cvGetSeqElem(objectKeypoints,ptpairs[i*2]))->pt;
        pt2[i] = ((CvSURFPoint*)cvGetSeqElem(imageKeypoints,ptpairs[i*2+1]))->pt;
    }

    _pt1 = cvMat(1, n, CV_32FC2, &pt1[0] );
    _pt2 = cvMat(1, n, CV_32FC2, &pt2[0] );
    if( !cvFindHomography( &_pt1, &_pt2, &_h, CV_RANSAC, 5 ))
        return 0;

    for( i = 0; i < 4; i++ )
    {
        double x = src_corners[i].x, y = src_corners[i].y;
        double Z = 1./(h[6]*x + h[7]*y + h[8]);
        double X = (h[0]*x + h[1]*y + h[2])*Z;
        double Y = (h[3]*x + h[4]*y + h[5])*Z;
        dst_corners[i] = cvPoint(cvRound(X), cvRound(Y));
    }

    return 1;
}
/* Extracts SURF features from 'pattern' and 'img', attempts to locate the
   pattern inside the image and reports how many descriptor pairs matched.
   Both inputs are converted to monochrome in place. Always returns 1. */
int openCV_SURFDetector(struct Image * pattern,struct Image * img)
{
   StartTimer(FIND_OBJECTS_DELAY);

    monochrome(img);
    IplImage  * image = cvCreateImage( cvSize(img->width,img->height), IPL_DEPTH_8U, img->depth);
    char * opencvImagePointerRetainer = image->imageData; // UGLY HACK: remember OpenCV's own buffer
    image->imageData = (char*) img->pixels; // UGLY HACK: borrow our pixels, must restore before release
    //cvCvtColor( image, image, CV_RGB2GRAY);

    monochrome(pattern);
    IplImage  * object = cvCreateImage( cvSize(pattern->width,pattern->height), IPL_DEPTH_8U, pattern->depth);
    char * opencvObjectPointerRetainer = object->imageData; // UGLY HACK: remember OpenCV's own buffer
    object->imageData = (char*) pattern->pixels; // UGLY HACK: borrow our pixels, must restore before release
    //cvCvtColor( object, object, CV_RGB2GRAY);


    CvMemStorage* storage = cvCreateMemStorage(0);
    static CvScalar colors[] = { {{0,0,255}}, {{0,128,255}}, {{0,255,255}}, {{0,255,0}}, {{255,128,0}}, {{255,255,0}}, {{255,0,0}}, {{255,0,255}}, {{255,255,255}} };

    IplImage* object_color = cvCreateImage(cvGetSize(object), 8, 3);
    cvCvtColor( object, object_color, CV_GRAY2BGR );

    CvSeq* objectKeypoints = 0, *objectDescriptors = 0;
    CvSeq* imageKeypoints = 0, *imageDescriptors = 0;
    int i;
    CvSURFParams params = cvSURFParams(500, 1);

    double tt = (double)cvGetTickCount();

    cvExtractSURF( object, 0, &objectKeypoints, &objectDescriptors, storage, params , 0 );
    //printf("Object Descriptors: %d\n", objectDescriptors->total);

    cvExtractSURF( image, 0, &imageKeypoints, &imageDescriptors, storage, params , 0 );
    //printf("Image Descriptors: %d\n", imageDescriptors->total);
    tt = (double)cvGetTickCount() - tt;

    //printf( "Extraction time = %gms\n", tt/(cvGetTickFrequency()*1000.));

    CvPoint src_corners[4] = {{0,0}, {object->width,0}, {object->width, object->height}, {0, object->height}};
    CvPoint dst_corners[4];

    //IplImage* correspond = cvCreateImage( cvSize(image->width, object->height+image->height), 8, 1 );
    //cvSetImageROI( correspond, cvRect( 0, 0, object->width, object->height ) );
    //cvCopy( object, correspond , 0 );
    //cvSetImageROI( correspond, cvRect( 0, object->height, correspond->width, correspond->height ) );
    //cvCopy( image, correspond , 0 );
    //cvResetImageROI( correspond );



    if( locatePlanarObject( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, src_corners, dst_corners ))
    {
        for( i = 0; i < 4; i++ )
        {
            CvPoint r1 = dst_corners[i%4];
            CvPoint r2 = dst_corners[(i+1)%4];
            //cvLine( correspond, cvPoint(r1.x, r1.y+object->height ), cvPoint(r2.x, r2.y+object->height ), colors[8] , 1 ,8 ,0  );
        }
    }

    struct pairList * ptpairs = 0;
    findPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );

    /* BUGFIX: 'ptpairs' is passed to findPairs() by value, so this local
       pointer may still be NULL here; the unguarded dereference crashed.
       NOTE(review): if findPairs() should allocate the list, the call likely
       needs &ptpairs — confirm against its declaration. */
    if( ptpairs != 0 )
    {
        printf(" Found %u pairs \n",(unsigned int) ptpairs->currentItems);
    }

    /* BUGFIX: object_color and storage were leaked. Releasing the storage
       also frees the keypoint/descriptor sequences allocated in it, so it
       must happen after their last use (findPairs above). */
    cvReleaseImage( &object_color );
    cvReleaseMemStorage( &storage );

    image->imageData = opencvImagePointerRetainer; // UGLY HACK: restore OpenCV's buffer before release
    cvReleaseImage( &image );

    /* BUGFIX: this line wrote through 'image' AFTER it was released
       (use-after-free) and left object->imageData pointing at the caller's
       pixels, which cvReleaseImage would then free. */
    object->imageData = opencvObjectPointerRetainer; // UGLY HACK
    cvReleaseImage( &object );

    EndTimer(FIND_OBJECTS_DELAY);
/*

    for( i = 0; i < (int)ptpairs->currentItems; i++ )
    {
        CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( objectKeypoints, ptpairs->item[i].p1; );
        CvSURFPoint* r2 = (CvSURFPoint*)cvGetSeqElem( imageKeypoints,  ptpairs->item[i].p2; );
        cvLine( correspond, cvPointFrom32f(r1->pt),
            cvPoint(cvRound(r2->pt.x), cvRound(r2->pt.y+object->height)), colors[8] );
    }
    cvShowImage( "Object Correspond", correspond );
    for( i = 0; i < objectKeypoints->total; i++ )
    {
        CvSURFPoint* r = (CvSURFPoint*)cvGetSeqElem( objectKeypoints, i );
        CvPoint center;
        int radius;
        center.x = cvRound(r->pt.x);
        center.y = cvRound(r->pt.y);
        radius = cvRound(r->size*1.2/9.*2);
        cvCircle( object_color, center, radius, colors[0], 1, 8, 0 );
    }
    cvShowImage( "Object", object_color );

    cvWaitKey(0);

    cvDestroyWindow("Object");
    cvDestroyWindow("Object Correspond");
*/
    return 1;
}
Example #5
0
int  sift_affine(const char * filenameLeft , const char * filenameRight ,  double SIFTThreshold ,
                 unsigned int RANSACLoops ,
                 unsigned int stitchedBorder ,
                 double reprojectionThresholdX ,
                 double reprojectionThresholdY ,
                 unsigned int useOpenCVEstimator
                 )
{

    fprintf(stderr,"Running SIFT on %s / %s \n" , filenameLeft , filenameRight);

    cv::Mat left = cv::imread(filenameLeft  , CV_LOAD_IMAGE_COLOR);
    if(! left.data ) { fprintf(stderr,"Left Image missing \n"); return 1; }

    cv::Mat right = cv::imread(filenameRight, CV_LOAD_IMAGE_COLOR);
    if(! right.data ) { fprintf(stderr,"Right Image missing \n"); return 1; }


    cv::DescriptorExtractor* extractor = new cv::SiftDescriptorExtractor();
    cv::SiftFeatureDetector detector;
    std::vector<cv::KeyPoint> keypointsLeft;
    cv::Mat descriptorsLeft;
    detector.detect(left, keypointsLeft);
    extractor->compute(left, keypointsLeft, descriptorsLeft);

    // Add results to image and save.
    cv::Mat output;
    cv::drawKeypoints(left, keypointsLeft, output);
    cv::imwrite("sift_features_left.jpg", output);


    std::vector<cv::KeyPoint> keypointsRight;
    cv::Mat descriptorsRight;
    detector.detect(right, keypointsRight);
    extractor->compute(right, keypointsRight, descriptorsRight);
    cv::drawKeypoints(right, keypointsRight, output);
    cv::imwrite("sift_features_right.jpg", output);

    //fprintf(stderr,"SIFT features ready \n");


    std::vector<cv::Point2f> srcRANSACPoints;
    std::vector<cv::Point2f> dstRANSACPoints;


    std::vector<cv::Point2f> srcPoints;
    std::vector<cv::Point2f> dstPoints;
    findPairs( SIFTThreshold , keypointsLeft, descriptorsLeft, keypointsRight, descriptorsRight, srcPoints, dstPoints);
    //printf("%zd keypoints are matched.\n", srcPoints.size());



   visualizeMatches(
                      "sift_initial_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );


   cv::Mat warp_mat( 2, 3,  CV_64FC1  );
   double M[6]={0};
   fitAffineTransformationMatchesRANSAC( RANSACLoops , reprojectionThresholdX , reprojectionThresholdY , M , warp_mat, srcPoints , dstPoints ,  srcRANSACPoints, dstRANSACPoints);


   stitchAffineMatch(
                     "wrappedAffine.jpg"  ,
                     stitchedBorder,
                     left ,
                     right ,
                     warp_mat
                    );


   visualizeMatches(
                      "sift_affine_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints ,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );




   cv::Mat homo_mat( 3, 3,  CV_64FC1  );
   double H[9]={0};

   if (useOpenCVEstimator)
   {
    homo_mat = cv::findHomography(srcPoints , dstPoints , CV_RANSAC);
   } else
   {
    fitHomographyTransformationMatchesRANSAC( RANSACLoops , reprojectionThresholdX , reprojectionThresholdY , H , homo_mat, srcPoints , dstPoints ,  srcRANSACPoints, dstRANSACPoints);
   }


   stitchHomographyMatch(
                         "wrappedHomography.jpg"  ,
                         stitchedBorder,
                         left ,
                         right ,
                         homo_mat
                        );

   visualizeMatches(
                      "sift_homography_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints ,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );
}
Example #6
0
// chain function; this function does the actual processing
//
// Per-buffer pipeline:
//   1. wrap the GstBuffer pixels in the pre-allocated IplImage and grayscale it;
//   2. match every stored object against SURF features of the whole frame,
//      updating matched objects and discarding stale/irrelevant ones;
//   3. fold the externally supplied rects (filter->rect_array) into
//      stored_objects, either refreshing an existing object or creating one;
//   4. for every object seen in THIS frame, optionally draw an overlay and
//      push a custom downstream "tracked-object" event;
//   5. reset rect_array and push the (possibly annotated) buffer downstream.
static GstFlowReturn
gst_surf_tracker_chain(GstPad *pad, GstBuffer *buf) {
    GstSURFTracker *filter;
    GstClockTime    timestamp;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    // Zero-copy wrap: point the pre-allocated IplImage header at the buffer's
    // pixel data instead of copying the frame.
    filter = GST_SURF_TRACKER(GST_OBJECT_PARENT(pad));
    filter->image->imageData = (char*) GST_BUFFER_DATA(buf);

    // Create the gray image for the surf 'features' search process
    cvCvtColor(filter->image, filter->gray, CV_BGR2GRAY);
    ++filter->frames_processed;
    timestamp = GST_BUFFER_TIMESTAMP(buf);

    // If exist stored_objects: search matching, update, cleaning
    if ((filter->stored_objects != NULL) && (filter->stored_objects->len > 0)) {
        CvMemStorage *surf_image_mem_storage;
        CvSeq        *surf_image_keypoints, *surf_image_descriptors;
        guint         i;
        gint          j;

        // Update the match set 'features' for each object
        surf_image_mem_storage = cvCreateMemStorage(0);

        // Search 'features' in full image
        surf_image_keypoints = surf_image_descriptors = NULL;
        cvExtractSURF(filter->gray, NULL, &surf_image_keypoints, &surf_image_descriptors,
                      surf_image_mem_storage, filter->params, 0);

        for (i = 0; i < filter->stored_objects->len; ++i) {
            InstanceObject *object;
            GArray         *pairs;

            object = &g_array_index(filter->stored_objects, InstanceObject, i);
            pairs  = g_array_new(FALSE, FALSE, sizeof(IntPair));

            // Pair this object's stored descriptors against the frame's.
            findPairs(object->surf_object_keypoints, object->surf_object_descriptors,
                      surf_image_keypoints, surf_image_descriptors, pairs);

            // if match, update object
            // (an object counts as matched when the fraction of its descriptors
            //  paired with the frame reaches MIN_MATCH_OBJECT)
            if (pairs->len && (float) pairs->len / object->surf_object_descriptors->total >= MIN_MATCH_OBJECT) {
                object->range_viewed++;
                object->last_frame_viewed = filter->frames_processed;
                object->timestamp         = timestamp;

                // Replace the last-match keypoint/descriptor sequences with
                // the points matched in this frame.
                if (object->surf_object_keypoints_last_match != NULL)
                    cvClearSeq(object->surf_object_keypoints_last_match);
                object->surf_object_keypoints_last_match = getMatchPoints(surf_image_keypoints, pairs, 1, object->mem_storage);

                if (object->surf_object_descriptors_last_match != NULL)
                    cvClearSeq(object->surf_object_descriptors_last_match);
                object->surf_object_descriptors_last_match = getMatchPoints(surf_image_descriptors, pairs, 1, object->mem_storage);

                // Estimate rect of objects localized
                object->rect_estimated = rectDisplacement(object->surf_object_keypoints, surf_image_keypoints, pairs, object->rect, PAIRS_PERC_CONSIDERATE);
            }

            g_array_free(pairs, TRUE);
        }

        if (surf_image_keypoints != NULL) cvClearSeq(surf_image_keypoints);
        if (surf_image_descriptors != NULL) cvClearSeq(surf_image_descriptors);
        cvReleaseMemStorage(&surf_image_mem_storage);

        // Clean old objects
        // Iterate backwards because g_array_remove_index_fast swaps the last
        // element into the removed slot. An object is dropped when it hasn't
        // been seen for DELOBJ_NFRAMES_IS_OLD frames, or it missed this frame
        // and was never seen long enough to matter.
        for (j = filter->stored_objects->len - 1; j >= 0; --j) {
            InstanceObject *object;

            object = &g_array_index(filter->stored_objects, InstanceObject, j);
            if ((filter->frames_processed - object->last_frame_viewed > DELOBJ_NFRAMES_IS_OLD) ||
                (filter->frames_processed != object->last_frame_viewed && object->range_viewed < DELOBJ_COMBOFRAMES_IS_IRRELEVANT)) {
                if (object->surf_object_keypoints != NULL) cvClearSeq(object->surf_object_keypoints);
                if (object->surf_object_descriptors != NULL) cvClearSeq(object->surf_object_descriptors);
                if (object->surf_object_keypoints_last_match != NULL) cvClearSeq(object->surf_object_keypoints_last_match);
                if (object->surf_object_descriptors_last_match != NULL) cvClearSeq(object->surf_object_descriptors_last_match);
                cvReleaseMemStorage(&object->mem_storage);
                g_array_remove_index_fast(filter->stored_objects, j);
            }
        }

    } // if any object exist

    // Process all haar rects
    if ((filter->rect_array != NULL) && (filter->rect_array->len > 0)) {
        guint i, j;

        for (i = 0; i < filter->rect_array->len; ++i) {
            CvRect rect = g_array_index(filter->rect_array, CvRect, i);

            // If already exist in 'stored_objects', update features. Else save
            // as new.
            // NOTE(review): filter->stored_objects is dereferenced here without
            // the NULL check applied elsewhere in this function — confirm it is
            // always allocated before the element is playing.
            for (j = 0; j < filter->stored_objects->len; ++j) {
                InstanceObject *object;

                object = &g_array_index(filter->stored_objects, InstanceObject, j);

                // It is considered equal if the "centroid match features" is inner
                // haar rect AND max area deviation is PERC_RECT_TO_SAME_OBJECT
                if (pointIntoRect(rect, (object->surf_object_keypoints_last_match != NULL) ? surfCentroid(object->surf_object_keypoints_last_match, cvPoint(0, 0)) : surfCentroid(object->surf_object_keypoints, cvPoint(0, 0))) &&
                    ((float) MIN((object->rect.width * object->rect.height), (rect.width * rect.height)) / (float) MAX((object->rect.width * object->rect.height), (rect.width * rect.height)) >= PERC_RECT_TO_SAME_OBJECT)) {

                    // Update the object features according to the new body rect:
                    // re-extract SURF only inside that region of interest.
                    cvSetImageROI(filter->gray, rect);
                    cvExtractSURF(filter->gray, NULL, &object->surf_object_keypoints, &object->surf_object_descriptors,
                                  object->mem_storage, filter->params, 0);
                    cvResetImageROI(filter->gray);
                    object->rect = object->rect_estimated = rect;
                    object->last_body_identify_timestamp = timestamp;

                    break;
                }
            }

            // If new, create object and append in stored_objects
            // (j reaches stored_objects->len only when the loop above found no
            //  matching object, i.e. it did not break)
            if (j >= filter->stored_objects->len) {
                InstanceObject object;

                object.surf_object_keypoints   = 0;
                object.surf_object_descriptors = 0;
                object.mem_storage             = cvCreateMemStorage(0);

                cvSetImageROI(filter->gray, rect);
                cvExtractSURF(filter->gray, NULL, &object.surf_object_keypoints, &object.surf_object_descriptors,
                              object.mem_storage, filter->params, 0);
                cvResetImageROI(filter->gray);

                // Only keep the object if SURF actually found descriptors in
                // the rect; otherwise the local storage simply goes unused.
                if (object.surf_object_descriptors && object.surf_object_descriptors->total > 0) {
                    object.id                                 = filter->static_count_objects++;
                    object.last_frame_viewed                  = filter->frames_processed;
                    object.range_viewed                       = 1;
                    object.rect                               = object.rect_estimated               = rect;
                    object.timestamp                          = object.last_body_identify_timestamp = timestamp;
                    object.surf_object_keypoints_last_match   = NULL;
                    object.surf_object_descriptors_last_match = NULL;

                    g_array_append_val(filter->stored_objects, object);
                }
            } // new
        }
    }

    // Put the objects found in the frame in gstreamer pad
    if ((filter->stored_objects != NULL) && (filter->stored_objects->len > 0)) {
        guint i;

        for (i = 0; i < filter->stored_objects->len; ++i) {
            InstanceObject object = g_array_index(filter->stored_objects, InstanceObject, i);

            // 'Continue' whether the object is not found in this frame
            if (object.timestamp == timestamp) {
                TrackedObject *tracked_object;
                GstEvent      *event;
                CvRect         rect;

                // Prefer the exact rect when the body was identified this
                // frame; otherwise fall back to the SURF-estimated rect.
                rect = ((object.last_body_identify_timestamp == timestamp) ? object.rect : object.rect_estimated);

                if (filter->verbose) {
                    GST_INFO("[object #%d rect] x: %d, y: %d, width: %d, height: %d\n", object.id, rect.x, rect.y, rect.width, rect.height);
                    // drawSurfPoints(object.surf_object_keypoints, cvPoint(object.rect.x, object.rect.y), filter->image, PRINT_COLOR, 0);
                    // drawSurfPoints(object.surf_object_keypoints_last_match, cvPoint(object.rect.x, object.rect.y), filter->image, PRINT_COLOR, 1);
                }

                if (filter->display_features) {
                    drawSurfPoints(object.surf_object_keypoints_last_match, cvPoint(0, 0), filter->image, PRINT_COLOR, 1);
                }

                if (filter->display) {
                    char *label;
                    float font_scaling;

                    // Larger frames get a slightly larger overlay font.
                    font_scaling = ((filter->image->width * filter->image->height) > (320 * 240)) ? 0.5f : 0.3f;

                    // Thick border when the body was identified this frame,
                    // thin border when only estimated.
                    cvRectangle(filter->image, cvPoint(rect.x, rect.y), cvPoint(rect.x + rect.width, rect.y + rect.height),
                                PRINT_COLOR, ((object.last_body_identify_timestamp == timestamp) ? 2 : 1), 8, 0);
                    label = g_strdup_printf("OBJ#%i", object.id);
                    printText(filter->image, cvPoint(rect.x + (rect.width / 2), rect.y + (rect.height / 2)), label, PRINT_COLOR, font_scaling, 1);
                    g_free(label);
                }

                // allocate and initialize 'TrackedObject' structure
                tracked_object = tracked_object_new();
                tracked_object->id        = g_strdup_printf("PERSON#%d", object.id);
                tracked_object->type      = TRACKED_OBJECT_DYNAMIC;
                tracked_object->height    = rect.height;
                tracked_object->timestamp = timestamp;

                // add the points that the define the lower part of the object (i.e,
                // the lower horizontal segment of the rectangle) as the objects perimeter
                tracked_object_add_point(tracked_object, rect.x, rect.y + rect.height);
                tracked_object_add_point(tracked_object, rect.x + rect.width, rect.y + rect.height);

                // send downstream event
                event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM,
                                             tracked_object_to_structure(tracked_object, "tracked-object"));
                gst_pad_push_event(filter->srcpad, event);
            }
        }
    }

    // Clean body rects
    g_array_free(filter->rect_array, TRUE);
    filter->rect_array = g_array_sized_new(FALSE, FALSE, sizeof(CvRect), 1);

    // Draw number of objects stored
    if (filter->display) {
        char *label = g_strdup_printf("N_STORED_OBJS: %3i", filter->stored_objects->len);
        printText(filter->image, cvPoint(0, 0), label, PRINT_COLOR, .5, 1);
        g_free(label);
    }

    gst_buffer_set_data(buf, (guint8*) filter->image->imageData, (guint) filter->image->imageSize);
    return gst_pad_push(filter->srcpad, buf);
}
/* Displays the object/image correspondences found via SURF matching in two
   HighGUI windows, blocks on a key press, then returns the located pattern
   corners through val[4]. 'object' and 'image' must be single-channel. */
void surf_match(IplImage* object_color, IplImage* object, IplImage* image,const CvSeq *objectKeypoints,const CvSeq *imageKeypoints,const CvSeq * objectDescriptors,const CvSeq * imageDescriptors, CvPoint val[4])
{
    cvNamedWindow("Object", 0);
    cvNamedWindow("Object Correspond", 0);

    static CvScalar colors[] = 
    {
        {{0,0,255}},
        {{0,128,255}},
        {{0,255,255}},
        {{0,255,0}},
        {{255,128,0}},
        {{255,255,0}},
        {{255,0,0}},
        {{255,0,255}},
        {{255,255,255}}
    };

    int i;

    CvPoint src_corners[4] = {{0,0}, {object->width,0}, {object->width, object->height}, {0, object->height}};
    CvPoint dst_corners[4];

    // Stack the object image above the scene image in one canvas.
    IplImage* correspond = cvCreateImage( cvSize(image->width, object->height+image->height), 8, 1 );
    cvSetImageROI( correspond, cvRect( 0, 0, object->width, object->height ) );
    cvCopy( object, correspond );
    // NOTE(review): the ROI height below exceeds the remaining canvas
    // (correspond->height instead of image->height); OpenCV clips it, but
    // confirm this matches the intent.
    cvSetImageROI( correspond, cvRect( 0, object->height, correspond->width, correspond->height ) );
    cvCopy( image, correspond );
    cvResetImageROI( correspond );

#ifdef USE_FLANN
    printf("Using approximate nearest neighbor search\n");
#endif

    // Draw the located pattern outline into the lower (scene) half.
    if( locatePlanarObject( objectKeypoints, objectDescriptors, imageKeypoints,
        imageDescriptors, src_corners, dst_corners ))
    {
        for( i = 0; i < 4; i++ )
        {
            CvPoint r1 = dst_corners[i%4];
            CvPoint r2 = dst_corners[(i+1)%4];
            cvLine( correspond, cvPoint(r1.x, r1.y+object->height ),
                cvPoint(r2.x, r2.y+object->height ), colors[8] );
        }
    }

    // ptpairs holds interleaved (object, image) keypoint indices.
    vector<int> ptpairs;
#ifdef USE_FLANN
    flannFindPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );
#else
    findPairs( objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, ptpairs );
#endif
    for( i = 0; i < (int)ptpairs.size(); i += 2 )
    {
        CvSURFPoint* r1 = (CvSURFPoint*)cvGetSeqElem( objectKeypoints, ptpairs[i] );
        CvSURFPoint* r2 = (CvSURFPoint*)cvGetSeqElem( imageKeypoints, ptpairs[i+1] );
        cvLine( correspond, cvPointFrom32f(r1->pt),
            cvPoint(cvRound(r2->pt.x), cvRound(r2->pt.y+object->height)), colors[8] );
    }

    cvShowImage( "Object Correspond", correspond );

    // Circle every SURF keypoint on the color copy of the object.
    for( i = 0; i < objectKeypoints->total; i++ )
    {
        CvSURFPoint* r = (CvSURFPoint*)cvGetSeqElem( objectKeypoints, i );
        CvPoint center;
        int radius;
        center.x = cvRound(r->pt.x);
        center.y = cvRound(r->pt.y);
        radius = cvRound(r->size*1.2/9.*2);
        cvCircle( object_color, center, radius, colors[0], 1, 8, 0 );
    }
    cvShowImage( "Object", object_color );

    cvWaitKey(0);

    cvDestroyWindow("Object");
    // BUGFIX: removed cvDestroyWindow("Object SURF") — no such window was
    // ever created (copy/paste leftover).
    cvDestroyWindow("Object Correspond");

    // BUGFIX: the correspondence canvas was leaked.
    cvReleaseImage( &correspond );

    // Hand the located corners back to the caller (the duplicated assignment
    // in the original was removed).
    for(int k=0;k<4;k++)
    {
        val[k] = dst_corners[k];
    }

}