Example #1
/*
 * To work with a Kinect or XtionPRO sensor, the user must install the OpenNI library and the PrimeSensor Module
 * for OpenNI, and configure OpenCV with the WITH_OPENNI flag set to ON (using CMake).
 */
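
/*
 * For reference, a typical configure/build sequence is sketched below (assuming an
 * out-of-source CMake build of an OpenCV source checkout; the paths and the -j value
 * are illustrative placeholders):
 *
 *   cd <opencv-build-dir>
 *   cmake -D WITH_OPENNI=ON <path-to-opencv-source>
 *   make -j4
 */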
int main( int argc, char* argv[] )
{
/* Kept commented out for reference: the original OpenNI capture sample plus a SURF feature-matching experiment.
    bool isColorizeDisp, isFixedMaxDisp;
    int imageMode;
    bool retrievedImageFlags[5];
    string filename;
    bool isVideoReading;
    parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, imageMode, retrievedImageFlags, filename, isVideoReading );

    cout << "Device opening ..." << endl;
    VideoCapture capture;
    if( isVideoReading )
        capture.open( filename );
    else
        capture.open( CV_CAP_OPENNI );

    cout << "done." << endl;

    if( !capture.isOpened() )
    {
        cout << "Can not open a capture object." << endl;
        return -1;
    }

    if( !isVideoReading )
    {
        bool modeRes=false;
        switch ( imageMode )
        {
            case 0:
                modeRes = capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_VGA_30HZ );
                break;
            case 1:
                modeRes = capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_SXGA_15HZ );
                break;
            case 2:
                modeRes = capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_SXGA_30HZ );
                break;
            default:
                CV_Error( CV_StsBadArg, "Unsupported image mode property.\n");
        }
        if (!modeRes)
            cout << "\nThis image mode is not supported by the device, the default value (CV_CAP_OPENNI_SXGA_15HZ) will be used.\n" << endl;
    }

    // Print some available device settings.
    cout << "\nDepth generator output mode:" << endl <<
            "FRAME_WIDTH      " << capture.get( CV_CAP_PROP_FRAME_WIDTH ) << endl <<
            "FRAME_HEIGHT     " << capture.get( CV_CAP_PROP_FRAME_HEIGHT ) << endl <<
            "FRAME_MAX_DEPTH  " << capture.get( CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl <<
            "FPS              " << capture.get( CV_CAP_PROP_FPS ) << endl <<
            "REGISTRATION     " << capture.get( CV_CAP_PROP_OPENNI_REGISTRATION ) << endl;
    if( capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR_PRESENT ) )
    {
        cout <<
            "\nImage generator output mode:" << endl <<
            "FRAME_WIDTH   " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_WIDTH ) << endl <<
            "FRAME_HEIGHT  " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_HEIGHT ) << endl <<
            "FPS           " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FPS ) << endl;
    }
    else
    {
        cout << "\nDevice doesn't contain image generator." << endl;
        if (!retrievedImageFlags[0] && !retrievedImageFlags[1] && !retrievedImageFlags[2])
            return 0;
    }

    Mat lastbgr;
    for(;;)
    {
        Mat depthMap;
        Mat validDepthMap;
        Mat disparityMap;
        Mat bgrImage;
        Mat grayImage;

        if( !capture.grab() )
        {
            cout << "Can not grab images." << endl;
            return -1;
        }
        else
        {
            if( retrievedImageFlags[0] && capture.retrieve( depthMap, CV_CAP_OPENNI_DEPTH_MAP ) )
            {
                const float scaleFactor = 0.05f;
                Mat show; depthMap.convertTo( show, CV_8UC1, scaleFactor );
                imshow( "depth map", show );
            }

            if( retrievedImageFlags[1] && capture.retrieve( disparityMap, CV_CAP_OPENNI_DISPARITY_MAP ) )
            {
                if( isColorizeDisp )
                {
                    Mat colorDisparityMap;
                    colorizeDisparity( disparityMap, colorDisparityMap, isFixedMaxDisp ? getMaxDisparity(capture) : -1 );
                    Mat validColorDisparityMap;
                    colorDisparityMap.copyTo( validColorDisparityMap, disparityMap != 0 );
                    imshow( "colorized disparity map", validColorDisparityMap );
                }
                else
                {
                    imshow( "original disparity map", disparityMap );
                }
            }

            if( retrievedImageFlags[2] && capture.retrieve( validDepthMap, CV_CAP_OPENNI_VALID_DEPTH_MASK ) )
                imshow( "valid depth mask", validDepthMap );

            if( retrievedImageFlags[3] && capture.retrieve( bgrImage, CV_CAP_OPENNI_BGR_IMAGE ) )
                imshow( "rgb image", bgrImage );

            if( retrievedImageFlags[4] && capture.retrieve( grayImage, CV_CAP_OPENNI_GRAY_IMAGE ) )
                imshow( "gray image", grayImage );
            
            if( capture.retrieve( bgrImage, CV_CAP_OPENNI_BGR_IMAGE) && capture.retrieve( disparityMap, CV_CAP_OPENNI_DISPARITY_MAP ) ){
                resize( disparityMap, disparityMap, bgrImage.size());
                Mat colorDisparityMap;
                colorizeDisparity( disparityMap, colorDisparityMap, isFixedMaxDisp ? getMaxDisparity(capture) : -1 );
                Mat validColorDisparityMap;
                colorDisparityMap.copyTo( validColorDisparityMap, disparityMap != 0 );

                if( !lastbgr.empty() ) {
                    // Match SURF features between the current and the previous BGR frame
                    // and draw the displacement vectors on top of the previous frame.
                    SurfFeatureDetector detector(400);
                    vector<KeyPoint> keypoint1, keypoint2;
                    detector.detect(bgrImage, keypoint1);
                    detector.detect(lastbgr, keypoint2);

                    SurfDescriptorExtractor extractor;
                    Mat des1, des2;
                    extractor.compute(bgrImage, keypoint1, des1);
                    extractor.compute(lastbgr, keypoint2, des2);

                    BFMatcher matcher(NORM_L2);
                    vector< vector<DMatch> > matches;
                    matcher.radiusMatch(des1, des2, matches, 0.2);

                    Mat img = lastbgr;
                    for( vector< vector<DMatch> >::iterator iter = matches.begin(); iter != matches.end(); ++iter ) {
                        if( iter->size() > 0 ) {
                            DMatch match = iter->at(0);
                            line( img, keypoint1[match.queryIdx].pt, keypoint2[match.trainIdx].pt, CV_RGB(255,0,0), 1 );
                        }
                    }
                    imshow("vector", img);
                }
                
                bgrImage.copyTo(lastbgr);

                addWeighted( validColorDisparityMap, 0.5, bgrImage, 0.5, 0.0, bgrImage );

                imshow( "mixed image", bgrImage);
            }
        }

        if( waitKey( 1 ) >= 0 )
            break;
    }

*/

	// Capture, RoboMap and astar are project-specific helpers, not part of OpenCV.
	Capture capture;
	if( !capture.isOpened() ) {
		cout << "Failed to open capture device." << endl;
		//return 0;
	}
	cout << capture.getInfo();

	if(RoboMap::loadMap("map"))
	{
		cout << "failed opeing map" << endl;
		return 0;
	}
	// Find a path with A* and print the resulting waypoints.
	vector<astar::ASPoint> vec = astar::asFindPath(astar::ASPoint(0, 0));
	for(size_t i = 0; i < vec.size(); ++i)
		cout << "-" << vec[i].col << " " << vec[i].row << endl;
	return 0; // NOTE: early return; the capture/matching loop below never runs

	Mat lastImg;
	Mat lastDepth;
	while(1) {
		if( !capture.grab() )
			break;
		Mat img = capture.getBGRImage();
		Mat depth = capture.getDepth();
		
		// Match the current image/depth pair against the previous one once both are available.
		if( !lastImg.empty() && !img.empty() &&
			!lastDepth.empty() && !depth.empty()) {
			match(lastImg, img, lastDepth, depth);
		}

		img.copyTo(lastImg);
		depth.copyTo(lastDepth);
		if( waitKey(30) >= 0) 
			break;
	}

	return 0;
}
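
/*
 * Hypothetical sketch of the match() helper called in the loop above; its real
 * implementation is not shown in this example. Assuming it mirrors the commented-out
 * SURF block (OpenCV 2.x, SURF lives in opencv2/nonfree/features2d.hpp), it could look
 * roughly like this. The depth maps are accepted but not used in this sketch, and the
 * function would have to be declared or defined before main() to actually compile.
 */
static void match( const Mat& lastImg, const Mat& img, const Mat& lastDepth, const Mat& depth )
{
    (void)lastDepth; (void)depth;        // depth is not used in this sketch

    // Detect SURF keypoints in the current and the previous frame.
    SurfFeatureDetector detector( 400 ); // same Hessian threshold as in the block above
    vector<KeyPoint> kp1, kp2;
    detector.detect( img, kp1 );
    detector.detect( lastImg, kp2 );

    // Compute descriptors and radius-match them with a brute-force L2 matcher.
    SurfDescriptorExtractor extractor;
    Mat des1, des2;
    extractor.compute( img, kp1, des1 );
    extractor.compute( lastImg, kp2, des2 );

    BFMatcher matcher( NORM_L2 );
    vector< vector<DMatch> > matches;
    matcher.radiusMatch( des1, des2, matches, 0.2f );

    // Draw the displacement vector of the best match for every keypoint.
    Mat vis = lastImg.clone();
    for( size_t i = 0; i < matches.size(); ++i )
        if( !matches[i].empty() )
            line( vis, kp1[ matches[i][0].queryIdx ].pt, kp2[ matches[i][0].trainIdx ].pt, CV_RGB(255,0,0), 1 );
    imshow( "vector", vis );
}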