void ShapeModel::viewShapeModel()
{
    int q1, q2;
    static ModelViewInfo vData;
    vData.vList.resize(this->nShapeParams, 30/2);
    vData.pModel = this;
    vData.curParam = 0;
    viewShapeModelUpdate(&vData);
    q1 = 15;
    q2 = 0;
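    // Create one window with two trackbars: the first edits the value of the
    // currently selected parameter (0..30, starting at the midpoint 15), the
    // second selects which of the nShapeParams parameters is being edited.
    // Both callbacks receive &vData so they can update the displayed model.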
    namedWindow("Viewing Shape Model", CV_WINDOW_AUTOSIZE);
    createTrackbar("param value", "Viewing Shape Model", 
                   &q1, 30, &viewShapeUpdateValue, &vData);
    createTrackbar("which param", "Viewing Shape Model", 
                   &q2, nShapeParams-1, &viewShapeUpdateCurParam, &vData);
}
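
The two trackbar callbacks referenced above are not shown on this page. An OpenCV trackbar callback always has the signature void(int, void*); as a rough sketch (assuming vList stores one slider value per shape parameter, which is what the resize call above suggests), viewShapeUpdateValue could look roughly like this:

void viewShapeUpdateValue(int pos, void *userdata)
{
    // Sketch only: the real callback lives in the ShapeModel sources. OpenCV
    // passes the new slider position and the userdata pointer that was handed
    // to createTrackbar (here, &vData).
    ModelViewInfo *info = static_cast<ModelViewInfo *>(userdata);
    info->vList[info->curParam] = pos;  // hypothetical: remember the new value
    viewShapeModelUpdate(info);         // redraw the model with updated values
}
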
Example #2
void FingerTracker::Setup()
{
	VideoCapture capture(0);
	if (!capture.isOpened())
	{
		throw std::runtime_error("Could not start camera capture");
	}

	int windowSize = 25;
	int Xpos = 200;
	int Ypos = 50;
	int update = 0;
	int buttonClicked = 0;
	namedWindow("RGB", CV_WINDOW_AUTOSIZE);
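	// Three trackbars let the user position (X, Y) and size the calibration
	// window; the mouse callback sets buttonClicked so the patch under the
	// window can be captured as the background reference.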
	createTrackbar("X", "RGB",  &Xpos, 320, TrackBarCallback, (void*)&update);
	createTrackbar("Y", "RGB",  &Ypos, 240, TrackBarCallback, (void*)&update);
	createTrackbar("Size", "RGB",  &windowSize, 100, TrackBarCallback, (void*)&update);
	setMouseCallback("RGB", MouseCallback, (void*)&buttonClicked);
	Mat fingerWindowBackground, fingerWindowBackgroundGray;

	m_calibrationData.reset(new CalibrationData());
	bool ticking = false;
	std::chrono::system_clock::time_point start = std::chrono::system_clock::now();
	while (true)
	{
		Mat frame, frameHSV;

		if (capture.read(frame))
		{
			flip(frame, frame, 1);

			pyrDown(frame, frame, Size(frame.cols / 2, frame.rows / 2));

			Rect fingerWindow(Point(Xpos, Ypos), Size(windowSize, windowSize*3));
			if (Xpos + windowSize >= frame.cols || Ypos + windowSize*3 >= frame.rows)
			{
				windowSize = 20;
				Xpos = 200;
				Ypos = 50;
				update = 0;
			}
			else if (buttonClicked == 1)
			{
				frame(fingerWindow).copyTo(fingerWindowBackground);
				cvtColor(fingerWindowBackground, fingerWindowBackgroundGray, CV_BGR2GRAY);
				buttonClicked = 0;
				update = 0;
				cvDestroyAllWindows();
			}

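			// Once a background patch has been captured, diff it against the live
			// patch: per-channel absdiff + threshold, summed and median-filtered,
			// yields a rough foreground mask of the finger inside the window.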
			if (fingerWindowBackgroundGray.rows && !m_calibrationData->m_ready)
			{
				Mat diff, thd;
				absdiff(frame(fingerWindow), fingerWindowBackground, diff);
				std::vector<Mat> ch;
				split(diff, ch);
				threshold(ch[0], ch[0], m_calibrationDiffThreshold, 255, 0);
				threshold(ch[1], ch[1], m_calibrationDiffThreshold, 255, 0);
				threshold(ch[2], ch[2], m_calibrationDiffThreshold, 255, 0);
				thd = ch[0];
				add(thd, ch[1], thd);
				add(thd, ch[2], thd);
				medianBlur(thd, thd, 5);

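				// Split the mask into vertical thirds and measure the foreground
				// coverage of each band against its calibration thresholds.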
				Mat top, middle, bottom;
				Rect r1 = Rect(0, 0, thd.cols, thd.rows/3);
				Rect r2 = Rect(0, thd.rows / 3 + 1, thd.cols, thd.rows/3);
				Rect r3 = Rect(0, thd.rows * 2 / 3 + 1, thd.cols, thd.rows -  thd.rows * 2 / 3 - 1);
				top = thd(r1);
				middle = thd(r2);
				bottom = thd(r3);			

				auto percentageTop = countNonZero(top) * 100.0 / top.size().area();
				auto percentageMiddle = countNonZero(middle) * 100.0 / middle.size().area();
				auto percentageBottom = countNonZero(bottom) * 100.0 / bottom.size().area();

				bool topReady = false;
				bool middleReady = false;
				bool bottomReady = false;

				Scalar c1, c2, c3;
				if (percentageTop > m_calibrationTopLowerThd && percentageTop < m_calibrationTopUpperThd)
				{
					topReady = true;
					c1 = Scalar(0, 255, 255);
				}
				else
				{
					c1 = Scalar(0, 0, 255);
				}

				if (percentageMiddle > m_calibrationMiddleLowerThd && percentageMiddle < m_calibrationMiddleUppperThd)
				{
					middleReady = true;					
					c2 = Scalar(0, 255, 255);
				}
				else
				{
					c2 = Scalar(0, 0, 255);
				}

				if (percentageBottom > m_calibrationBottomLowerThd && percentageBottom < m_calibrationBottomUpperThd)
				{
					bottomReady = true;
					c3 = Scalar(0, 255, 255);
				}
				else
				{
					c3 = Scalar(0, 0, 255);
				}

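				// All three bands must stay within their thresholds for one full
				// second (the 'ticking' timer) before calibration is accepted.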
				bool readyToGo = false;
				if (middleReady && topReady && bottomReady)
				{
					c1 = Scalar(0, 255, 0);
					c2 = Scalar(0, 255, 0);
					c3 = Scalar(0, 255, 0);
					if (!ticking)
					{
						start = std::chrono::system_clock::now();
						ticking = true;
					}
					if (std::chrono::system_clock::now() - start > std::chrono::seconds(1))
					{
						readyToGo = true;
					}
				}
				else
				{
					ticking = false;
				}

#if ENABLE_DEBUG_WINDOWS
				std::stringstream ss;
				ss << percentageTop << ", " << percentageMiddle << ", " << percentageBottom;
				putText(frame, ss.str(), Point(0, getTextSize(ss.str(), 0, 0.5, 1, nullptr).height), 0, 0.5, Scalar::all(255), 1);
				cv::imshow("Thresholded", thd);
#endif

				if (percentageTop >= m_calibrationTopUpperThd && percentageBottom >= m_calibrationBottomUpperThd && percentageMiddle >= m_calibrationMiddleUppperThd)
				{
					putText(frame, "Move finger away from camera", Point(0, getTextSize("Move finger away from camera", 0, 0.5, 1, nullptr).height), 0, 0.5, Scalar::all(255), 1);
				}
				else if (percentageTop <= m_calibrationTopLowerThd && percentageBottom <= m_calibrationBottomLowerThd && percentageMiddle <= m_calibrationMiddleLowerThd)
				{
					putText(frame, "Move finger closer to camera", Point(0, getTextSize("Move finger closer to camera", 0, 0.5, 1, nullptr).height), 0, 0.5, Scalar::all(255), 1);
				}
				
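				// Calibration accepted: build a 2D Hue-Saturation histogram of the
				// finger patch, masked by the foreground mask, normalise it, and
				// store it in m_calibrationData for the tracking stage.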
				if (readyToGo)
				{
					Mat framePatchHSV;
					cvtColor(frame(fingerWindow), framePatchHSV, CV_BGR2HSV);
					cvtColor(frame, frameHSV, CV_BGR2HSV);

					MatND hist;
					calcHist(&framePatchHSV, 1, m_calibrationData->m_channels, thd, hist, 2, m_calibrationData->m_histSize, (const float**)m_calibrationData->m_ranges, true, false);
					m_calibrationData->m_hist = hist;
					normalize(m_calibrationData->m_hist, m_calibrationData->m_hist, 0, 255, NORM_MINMAX, -1, Mat());

#if ENABLE_DEBUG_WINDOWS

					double maxVal=0;
					minMaxLoc(m_calibrationData->m_hist, 0, &maxVal, 0, 0);
					int scale = 10;
					Mat histImg = Mat::zeros(m_calibrationData->m_sbins*scale, m_calibrationData->m_hbins*10, CV_8UC3);
					for( int h = 0; h < m_calibrationData->m_hbins; h++)
					{
						for( int s = 0; s < m_calibrationData->m_sbins; s++ )
						{
							float binVal = m_calibrationData->m_hist.at<float>(h, s);
							int intensity = cvRound(binVal*255/maxVal);
							rectangle( histImg, Point(h*scale, s*scale),
										Point( (h+1)*scale - 1, (s+1)*scale - 1),
										Scalar::all(intensity),
										CV_FILLED );
						}
					}

					imshow("H-S Histogram", histImg);
#endif
					m_calibrationData->m_ready = true;
					frame(fingerWindow).copyTo(m_calibrationData->m_fingerPatch);
					m_calibrationData->m_fingerRect = fingerWindow;
					m_currentCandidate.m_windowRect = fingerWindow;
					m_currentCandidate.m_fingerPosition = fingerWindow.tl();
					return;
				}

				rectangle(frame, r1.tl() + fingerWindow.tl(), r1.br() + fingerWindow.tl(), c1);
				rectangle(frame, r2.tl() + fingerWindow.tl(), r2.br() + fingerWindow.tl(), c2);
				rectangle(frame, r3.tl() + fingerWindow.tl(), r3.br() + fingerWindow.tl(), c3);
				imshow("Calibration", frame);
			}
			else
			{
				int baseline = 0;
				putText(frame, "Adjust calibration window, click when ready", Point(0, getTextSize("Adjust calibration window", 0, 0.4, 2, &baseline).height), 0, 0.4, Scalar::all(255), 1);
				rectangle(frame, fingerWindow.tl(), fingerWindow.br(), Scalar(0, 0, 255));
				imshow("RGB", frame);
			}
			auto key = cvWaitKey(10);
			if (char(key) == 27)
			{
				break;
			}
		}
	}
	capture.release();
}
Example #3
int main(int argc, char **argv)
{
    //Create a CMT object
    Config config("FAST", "BRISK","RANSAC",0.5);
    CMT cmt(config);

    //Initialization bounding box
    Rect rect;

    //Parse args
    int challenge_flag = 0;
    int loop_flag = 0;
    int verbose_flag = 0;
    int bbox_flag = 0;
    int skip_frames = 0;
    int skip_msecs = 0;
    int output_flag = 0;
    string input_path;
    string output_path;

    const int detector_cmd = 1000;
    const int descriptor_cmd = 1001;
    const int bbox_cmd = 1002;
    const int no_scale_cmd = 1003;
    const int with_rotation_cmd = 1004;
    const int skip_cmd = 1005;
    const int skip_msecs_cmd = 1006;
    const int output_file_cmd = 1007;

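    //Long-only options are mapped to the sentinel codes above so they can be
    //told apart in the getopt_long switch below.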
    struct option longopts[] =
    {
        //No-argument options
        {"challenge", no_argument, &challenge_flag, 1},
        {"loop", no_argument, &loop_flag, 1},
        {"verbose", no_argument, &verbose_flag, 1},
        {"no-scale", no_argument, 0, no_scale_cmd},
        {"with-rotation", no_argument, 0, with_rotation_cmd},
        //Argument options
        {"bbox", required_argument, 0, bbox_cmd},
        {"detector", required_argument, 0, detector_cmd},
        {"descriptor", required_argument, 0, descriptor_cmd},
        {"output-file", required_argument, 0, output_file_cmd},
        {"skip", required_argument, 0, skip_cmd},
        {"skip-msecs", required_argument, 0, skip_msecs_cmd},
        {0, 0, 0, 0}
    };

    int index = 0;
    int c;
    while((c = getopt_long(argc, argv, "v", longopts, &index)) != -1)
    {
        switch (c)
        {
            case 'v':
                verbose_flag = true;
                break;
            case bbox_cmd:
                {
                    //TODO: The following also accepts strings of the form %f,%f,%f,%fxyz...
                    string bbox_format = "%f,%f,%f,%f";
                    float x,y,w,h;
                    int ret = sscanf(optarg, bbox_format.c_str(), &x, &y, &w, &h);
                    if (ret != 4)
                    {
                        cerr << "bounding box must be given in format " << bbox_format << endl;
                        return 1;
                    }

                    bbox_flag = 1;
                    rect = Rect(x,y,w,h);
                }
                break;
            case detector_cmd:
                cmt.str_detector = optarg;
                break;
            case descriptor_cmd:
                cmt.str_descriptor = optarg;
                break;
            case output_file_cmd:
                output_path = optarg;
                output_flag = 1;
                break;
            case skip_cmd:
                {
                    int ret = sscanf(optarg, "%d", &skip_frames);
                    if (ret != 1)
                    {
                      skip_frames = 0;
                    }
                }
                break;
            case skip_msecs_cmd:
                {
                    int ret = sscanf(optarg, "%d", &skip_msecs);
                    if (ret != 1)
                    {
                      skip_msecs = 0;
                    }
                }
                break;
            case no_scale_cmd:
                cmt.consensus.estimate_scale = false;
                break;
            case with_rotation_cmd:
                cmt.consensus.estimate_rotation = true;
                break;
            case '?':
                return 1;
        }

    }

    // Can only skip frames or milliseconds, not both.
    if (skip_frames > 0 && skip_msecs > 0)
    {
      cerr << "You can only skip frames, or milliseconds, not both." << endl;
      return 1;
    }

    //One argument remains
    if (optind == argc - 1)
    {
        input_path = argv[optind];
    }

    else if (optind < argc - 1)
    {
        cerr << "Only one argument is allowed." << endl;
        return 1;
    }

    //Set up logging
    FILELog::ReportingLevel() = verbose_flag ? logDEBUG : logINFO;
    Output2FILE::Stream() = stdout; //Log to stdout

    //Challenge mode
    if (challenge_flag)
    {
        //Read list of images
        ifstream im_file("images.txt");
        vector<string> files;
        string line;
        while(getline(im_file, line ))
        {
            files.push_back(line);
        }

        //Read region
        ifstream region_file("region.txt");
        vector<float> coords = getNextLineAndSplitIntoFloats(region_file);

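        //region.txt holds either 4 values (x, y, width, height) or 8 values
        //(four corner points); a polygon is reduced to its bounding box.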
        if (coords.size() == 4) {
            rect = Rect(coords[0], coords[1], coords[2], coords[3]);
        }

        else if (coords.size() == 8)
        {
            //Split into x and y coordinates
            vector<float> xcoords;
            vector<float> ycoords;

            for (size_t i = 0; i < coords.size(); i++)
            {
                if (i % 2 == 0) xcoords.push_back(coords[i]);
                else ycoords.push_back(coords[i]);
            }

            float xmin = *min_element(xcoords.begin(), xcoords.end());
            float xmax = *max_element(xcoords.begin(), xcoords.end());
            float ymin = *min_element(ycoords.begin(), ycoords.end());
            float ymax = *max_element(ycoords.begin(), ycoords.end());

            rect = Rect(xmin, ymin, xmax-xmin, ymax-ymin);
            cout << "Found bounding box " << xmin << " " << ymin << " " << xmax-xmin << " " << ymax-ymin << endl;
        }

        else {
            cerr << "Invalid bounding box format" << endl;
            return 0;
        }

        //Read first image
        Mat im0 = imread(files[0]);
        Mat im0_gray;
        cvtColor(im0, im0_gray, CV_BGR2GRAY);

        //Initialize cmt
        cmt.initialize(im0_gray, rect);

        //Write init region to output file
        ofstream output_file("output.txt");
        output_file << rect.x << ',' << rect.y << ',' << rect.width << ',' << rect.height << std::endl;

        //Process images, write output to file
        for (size_t i = 1; i < files.size(); i++)
        {
            FILE_LOG(logINFO) << "Processing frame " << i << "/" << files.size();
            Mat im = imread(files[i]);
            Mat im_gray;
            cvtColor(im, im_gray, CV_BGR2GRAY);
            cmt.processFrame(im_gray);
            if (verbose_flag)
            {
                display(im, cmt);
            }
            rect = cmt.bb_rot.boundingRect();
            output_file << rect.x << ',' << rect.y << ',' << rect.width << ',' << rect.height << std::endl;
        }

        output_file.close();

        return 0;
    }

    //Normal mode

    //Create window
    namedWindow(WIN_NAME);

    VideoCapture cap;

    bool show_preview = true;

    //If no input was specified
    if (input_path.length() == 0)
    {
        cap.open(0); //Open default camera device
    }

    //Else open the video specified by input_path
    else
    {
        cap.open(input_path);

        if (skip_frames > 0)
        {
          cap.set(CV_CAP_PROP_POS_FRAMES, skip_frames);
        }

        if (skip_msecs > 0)
        {
          cap.set(CV_CAP_PROP_POS_MSEC, skip_msecs);

          // Now which frame are we on?
          skip_frames = (int) cap.get(CV_CAP_PROP_POS_FRAMES);
        }

        show_preview = false;
    }

    //If it doesn't work, stop
    if(!cap.isOpened())
    {
        cerr << "Unable to open video capture." << endl;
        return -1;
    }

    //Show preview until key is pressed
    while (show_preview)
    {
        Mat preview;
        cap >> preview;

        screenLog(preview, "Press a key to start selecting an object.");
        imshow(WIN_NAME, preview);

        char k = waitKey(10);
        if (k != -1) {
            show_preview = false;
        }
    }

    //Get initial image
    Mat im0;
    cap >> im0;

    //If no bounding was specified, get it from user
    if (!bbox_flag)
    {
        rect = getRect(im0, WIN_NAME);
    }

    FILE_LOG(logINFO) << "Using " << rect.x << "," << rect.y << "," << rect.width << "," << rect.height
        << " as initial bounding box.";

    //Convert im0 to grayscale
    Mat im0_gray;
    if (im0.channels() > 1) {
        cvtColor(im0, im0_gray, CV_BGR2GRAY);
    } else {
        im0_gray = im0;
    }

    //Initialize CMT
    cmt.initialize(im0_gray, rect);

    int frame = skip_frames;

    //Open output file.
    ofstream output_file;

    if (output_flag)
    {
        int msecs = (int) cap.get(CV_CAP_PROP_POS_MSEC);

        output_file.open(output_path.c_str());
        output_file << OUT_FILE_COL_HEADERS << endl;
        output_file << frame << "," << msecs << ",";
        output_file << cmt.points_active.size() << ",";
        output_file << write_rotated_rect(cmt.bb_rot) << endl;
    }

    //Main loop
    while (true)
    {
        frame++;

        Mat im;

        //If loop flag is set, reuse initial image (for debugging purposes)
        if (loop_flag) im0.copyTo(im);
        else cap >> im; //Else use next image in stream

        if (im.empty()) break; //Exit at end of video stream

        Mat im_gray;
        if (im.channels() > 1) {
            cvtColor(im, im_gray, CV_BGR2GRAY);
        } else {
            im_gray = im;
        }

        //Let CMT process the frame
        cmt.processFrame(im_gray);

        //Output.
        if (output_flag)
        {
            int msecs = (int) cap.get(CV_CAP_PROP_POS_MSEC);
            output_file << frame << "," << msecs << ",";
            output_file << cmt.points_active.size() << ",";
            output_file << write_rotated_rect(cmt.bb_rot) << endl;
        }
        else
        {
            //TODO: Provide meaningful output
            FILE_LOG(logINFO) << "#" << frame << " active: " << cmt.points_active.size();
        }

        //Display image and then quit if requested.
        char key = display(im, cmt);
        if(key == 'q') break;
    }

    //Close output file.
    if (output_flag) output_file.close();

    return 0;
}
Example #4
int main(int argc, char **argv)
{
    CMT cmt;
    Rect rect;
    
    FILELog::ReportingLevel() = logINFO; // = logDEBUG;
    Output2FILE::Stream() = stdout; //Log to stdout

    // openCV window
    namedWindow(WIN_NAME);

    // libuvc variables
    uvc_context *ctx = NULL;
    uvc_device_t *dev = NULL;
    uvc_device_handle_t *handle = NULL;
    uvc_stream_ctrl_t ctrl;
    uvc_error_t res;

    // create a UVC context
    res = uvc_init(&ctx, NULL);
    uvc_error(res, "init");

    // search for the Intel camera specifically
    res = uvc_find_device(ctx, &dev, 0x8086, 0x0a66, NULL);
    uvc_error(res, "find_device");

    // open the camera device
    res = uvc_open(dev, &handle);
    uvc_error(res, "open2");

    //uvc_print_diag(handle, stderr);

    // configure the stream format to use
    res = uvc_get_stream_ctrl_format_size(handle, &ctrl, UVC_FRAME_FORMAT_YUYV, W, H, FPS);
    uvc_perror(res, "get_stream");

    //uvc_print_stream_ctrl(&ctrl, stderr);

    // OpenCV matrix for storing the captured image data, CV_8UC3
    // means 8-bit Unsigned Char, 3 channels per pixel.
    Mat im;
    im.create( H, W, CV_8UC3);

    image_data = malloc(W*H*3);
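    // image_data is a shared buffer: the streaming callback fills it with the
    // most recent frame, and the main loop copies it into the OpenCV matrix.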

    // start streaming data from the camera
    printf("Start streaming...\n");
    res = uvc_start_streaming(handle, &ctrl, uvc_callback, (void*)&cmt, 0);
    uvc_error(res, "start_streaming");
    
    // wait until the first frame has arrived so it can be used for previewing
    printf("Waiting for first frame...\n");
    while(!got_first_frame)
        usleep(10000);

    // grab the data for the captured frame
    memcpy(im.data, image_data, W*H*3);

    // display it in OpenCV and select a region to track
    rect = getRect(im, WIN_NAME);
    printf("Bounding box: %d,%d+%dx%d\n", rect.x, rect.y, rect.width, rect.height);

    // convert to suitable format for CMT if needed
    Mat im0_gray;
    if (im.channels() > 1) 
        cvtColor(im, im0_gray, CV_BGR2GRAY);
    else 
        im0_gray = im;

    cmt.initialize(im0_gray, rect);
    printf("Main loop starting\n");

    int frame = 0;
    while (true)
    {
        frame++;

        // grab the data for the captured frame
        memcpy(im.data, image_data, W*H*3);

        Mat im_gray;
        if (im.channels() > 1) 
            cvtColor(im, im_gray, CV_BGR2GRAY);
        else
            im_gray = im;

        cmt.processFrame(im_gray);
       
        printf("#%d, active: %lu\n", frame, cmt.points_active.size());
        char key = display(im, cmt);
        if(key == 'q')
            break;
    }

    uvc_stop_streaming(handle);

    free(image_data);

    uvc_close(handle);
    uvc_unref_device(dev);
    uvc_exit(ctx);

    return 0;
}