示例#1
0
/* Try to make `reader` an open capture for device index `dev`.
 *
 * Returns true when the device is open on return; on failure (including an
 * exception thrown by the backend) the capture is released and false is
 * returned, so callers never receive a half-opened device.
 *
 * Fixes vs. original: an already-open reader used to leave `opened == false`
 * and fall into the cleanup branch, releasing a working device; and the
 * function definition had a stray trailing ';'. */
bool
openCaptureDevice(VideoCapture &reader, int dev)
{
    // An already-open capture counts as success and is left untouched.
    bool opened = reader.isOpened();
    try
    {
        if (!opened)
        {
            std::cout << "\nTrying to open device:\n " << dev << "\n";
            opened = reader.open(dev);
        }
    } catch (...)
    {
        // Backend threw while probing: report and make sure nothing leaks.
        std::cerr << "Stopping search. Cam not detected with input:\n " << dev
                  << "\n";
        reader.release();
        opened = false;
    }

    // open() can fail without throwing; release any partial state.
    if (!opened)
    {
        reader.release();
    }

    return opened;
}
/*
This function handles the releasing of objects when this node is
requested or forced (via CTRL+C) to shutdown.
sig: POSIX signal number that triggered the shutdown (unused here).
*/
void onShutdown(int sig){
	// Close the preview window first, then free all three global captures.
	destroyWindow(CVWINDOW);
	cap1.release();
	cap2.release();
	cap3.release();
	ROS_INFO("All objects should have been released, proper shutdown complete");
	// Finally tear down the ROS node so the spin loop exits.
	ros::shutdown();
}
/* Signal handler: stop the controller, free the camera, then terminate.
 * s: the signal number that was caught.
 * NOTE(review): printf/exit from a signal handler are not async-signal-safe;
 * acceptable for a demo, confirm before production use. */
void my_handler(int s)
{
    printf("Caught signal %d\n", s);
    control.stop();
    cap.release();
    exit(1);
}
//==============================================================================
// Interactive face-detector demo.
// argv[1]: path to a serialized face_detector model (required).
// argv[2]: optional video file; when absent the default camera (0) is used.
// Draws each detected feature point as a small green circle until the stream
// ends or the user presses 'q'.
int main(int argc,char** argv)
{
	//parse command line arguments
	if(argc < 2){cout << usage << endl; return 0;}
	if(parse_help(argc,argv)){cout << usage << endl; return 0;}
	
	//load detector model
	face_detector detector = load_ft<face_detector>(argv[1]);

	//open video stream
	VideoCapture cam; 
	if(argc > 2)cam.open(argv[2]); else cam.open(0);
	if(!cam.isOpened()){
		cout << "Failed opening video file." << endl
		 << usage << endl; return 0;
	}
	//detect until user quits
	namedWindow("face detector");
	// CV_CAP_PROP_POS_AVI_RATIO is the stream position in [0,1]; stopping just
	// short of 1.0 avoids re-reading past the last frame.  For live cameras
	// this property is typically 0, so the loop runs until 'q' is pressed.
	while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
		Mat im; cam >> im;     
		vector<Point2f> p = detector.detect(im);
		if(p.size() > 0){
			for(int i = 0; i < int(p.size()); i++)
		circle(im,p[i],1,CV_RGB(0,255,0),2,CV_AA);
		}
		imshow("face detector",im);
		if(waitKey(10) == 'q')break;
	}
	destroyWindow("face detector"); cam.release(); return 0;
}
示例#5
0
    /**
     * JNI binding for org.opencv.highgui.VideoCapture.n_release().
     * `self` carries the native VideoCapture pointer; any C++ exception is
     * translated into a Java exception rather than crossing the JNI boundary.
     */
    JNIEXPORT void JNICALL Java_org_opencv_highgui_VideoCapture_n_1release
    (JNIEnv* env, jclass, jlong self)
    {
        try {
#ifdef DEBUG
            LOGD("highgui::VideoCapture_n_1release()");
#endif // DEBUG
            VideoCapture* me = (VideoCapture*) self; //TODO: check for NULL
            me->release(  );

            return;
        } catch(const cv::Exception& e) {
            // Catch by const reference: the original caught by value, which
            // copies the exception and can slice derived types.
#ifdef DEBUG
            LOGD("highgui::VideoCapture_n_1release() catched cv::Exception: %s", e.what());
#endif // DEBUG
            jclass je = env->FindClass("org/opencv/core/CvException");
            if(!je) je = env->FindClass("java/lang/Exception");
            env->ThrowNew(je, e.what());
            return;
        } catch (...) {
#ifdef DEBUG
            LOGD("highgui::VideoCapture_n_1release() catched unknown exception (...)");
#endif // DEBUG
            jclass je = env->FindClass("java/lang/Exception");
            env->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1release()}");
            return;
        }
    }
示例#6
0
/* Run MOG2 background subtraction over `capture`, overlaying the frame number
 * and showing both the frame and the foreground mask until the user presses
 * 'q' or ESC.  Uses file-scope globals: keyboard (last key), frame,
 * fgMaskMOG2.  Exits the whole process on read/open failure. */
int processVideo(VideoCapture& capture) {
    if(!capture.isOpened()){
        //error in opening the video input
        exit(EXIT_FAILURE);
    }
    Ptr<BackgroundSubtractor> pMOG2 = createBackgroundSubtractorMOG2(); //MOG2 approach
    //read input data. ESC or 'q' for quitting
    // NOTE(review): `keyboard` is a global whose first-iteration value comes
    // from its initializer elsewhere in the file -- confirm it is not 'q'/27.
    while( (char)keyboard != 'q' && (char)keyboard != 27 ){
        //read the current frame
        if(!capture.read(frame)) {
            cerr << "Unable to read next frame." << endl;
            cerr << "Exiting..." << endl;
            exit(EXIT_FAILURE);
        }
        //update the background model
        // NOTE(review): learningRate -2 is outside the documented range
        // ([0,1], or negative meaning "automatic") -- verify intent; -1 is
        // the conventional "auto" value.
        pMOG2->apply(frame, fgMaskMOG2,-2);
        //get the frame number and write it on the current frame
        stringstream ss;
        // Filled rectangle (thickness -1) as a backdrop for the text.
        rectangle(frame, cv::Point(10, 2), cv::Point(100,20),
                cv::Scalar(140,89,255), -1);
        ss << capture.get(CAP_PROP_POS_FRAMES);
        string frameNumberString = ss.str();
        putText(frame, frameNumberString.c_str(), cv::Point(15, 15),
                FONT_HERSHEY_SIMPLEX, 0.5 , cv::Scalar(0,0,0));
        //show the current frame and the fg masks
        imshow("Frame", frame);
        imshow("FG Mask MOG 2", fgMaskMOG2);
        //get the input from the keyboard
        keyboard = waitKey( 30 );
    }
    //delete capture object
    capture.release();
    return 0;
}
// Stream frames from the default webcam, showing them live.  When the user
// presses ESC (or the stream ends) the last frame is written to "image.jpg",
// then re-loaded and displayed until any key is pressed.
int main()
{
  VideoCapture webcam;
  webcam.open(0);

  Mat img;
  for (;;)
  {
      if (!webcam.read(img))
          break;                        // device gone or stream exhausted

      imshow("WEbcam", img);

      const int pressed = cvWaitKey(10);  // wait up to 10 ms for a key stroke
      if (pressed == 27)
          break;  // ESC stops capturing; the last grabbed frame is kept in img
  }

  /*saving image*/
  imwrite("image.jpg", img);
  webcam.release();

  /*Re-display image*/
  Mat img2 = imread("image.jpg");
  imshow("Taken Image", img2);
  cvWaitKey(0);

  return 0;
}
示例#8
0
/* Simple video player: plays the file given by option "v" (or camera 0 when
 * "v" is "null") starting at frame index "i", until 'q' is pressed or the
 * stream ends.  Returns 1 when the source cannot be opened. */
int main(int argc, char * argv[]){
  VideoCapture cam;
 //Read options
  CommandLineParser parser(argc, argv, keys);
  string video = parser.get<string>("v");
  int f0 = parser.get<int>("i");
  //Init cam
  if (video != "null")
    cam.open(video);
  else
    cam.open(0);
  if (!cam.isOpened()){
	  cout << "cam device failed to open!" << endl;
    return 1;
  }
  //Read first frame
  Mat frame;
  cam.set(CV_CAP_PROP_POS_FRAMES,f0);
  // NOTE(review): this priming read consumes frame f0 without displaying it,
  // so playback actually starts at f0 + 1 -- confirm this is intended.
  cam.read(frame);
  while(cam.read(frame)){
  	//Display
    imshow("Player", frame);
    char key = cvWaitKey(33);
    if (key == 'q')
      break;
  }
  cam.release();
  return 0;
}
示例#9
0
文件: tcpclient.cpp 项目: subnr01/MPC
/**
 * this function provides a way to exit nicely from the system
 *
 * msg:    message to print ("NULL" suppresses it); goes to stdout on success,
 *         stderr on failure.
 * retval: process exit status (0 = success).
 * Side effects: closes the client socket, releases the global capture and
 * image buffers, and destroys the global mutex before exiting.
 */
void quit(string msg, int retval)
{
    if (retval == 0) {
        cout << (msg == "NULL" ? "" : msg) << "\n" << endl;
    } else {
        cerr << (msg == "NULL" ? "" : msg) << "\n" << endl;
    }
    if (clientSock){
        close(clientSock);
    }
    if (capture.isOpened()){
        capture.release();
    }
    // The original wrote `(~img0);` etc.: bitwise-NOT builds an inverted
    // temporary that is immediately discarded -- a no-op.  release() actually
    // frees the pixel data, which is what this cleanup path intends.
    if (!(img0.empty())){
        img0.release();
    }
    if (!(img1.empty())){
        img1.release();
    }
    if (!(img2.empty())){
        img2.release();
    }
    pthread_mutex_destroy(&gmutex);
    exit(retval);
}
示例#10
0
/* Worker-thread body: runs a face detector (OpenCL or CPU variant, chosen by
 * the global threadUseCL flag) over the detector's configured video file,
 * optionally displaying results, until the video ends or a key is pressed.
 * pParams: pointer to the FaceDetector implementation to drive. */
void video_thread_CL(void* pParams)
{
	FaceDetector *faceDetector;
	// NOTE(review): both branches assume the CL/Cpu classes derive from
	// FaceDetector; the C-style casts bypass any compiler check -- confirm.
	if (threadUseCL){
		faceDetector = (FaceDetectorCL*)pParams;
	}
	else{
		faceDetector = (FaceDetectorCpu*)pParams;
	}
	
	std::string name = faceDetector->name();

	// Alternative cascade files that can be loaded instead:
	//HAAR_EYE_TREE_EYEGLASSES_DATA
	//HAAR_EYE_DATA
	//HAAR_FRONT_FACE_DEFAULT_DATA
	//LBP_FRONTAL_FACE
	//LBP_PROFILE_FACE
	faceDetector->load(HAAR_FRONT_FACE_DEFAULT_DATA);

	VideoCapture videoCapture;
	cv::Mat frame, frameCopy, image;

	videoCapture.open(faceDetector->videoFile().c_str());
	if (!videoCapture.isOpened()) { cout << "No video detected" << endl; return; }

	if (imshowFlag) { cv::namedWindow(name.c_str(), 1); }

	if (videoCapture.isOpened())
	{
		cout << "In capture ..." << name.c_str() << endl;
		// grab()/retrieve() split: grab advances the stream, retrieve decodes.
		while (videoCapture.grab())
		{
			if (!videoCapture.retrieve(frame, 0)) { break; }

			faceDetector->setSrcImg(frame, 1);
			faceDetector->doWork();
			if (imshowFlag){ cv::imshow(name.c_str(), faceDetector->resultMat()); }
			
			// Log the top-left corner of every detected face rectangle.
			std::vector<cv::Rect> &faces_result = faceDetector->getResultFaces();
			std::cout << "face --" << name.c_str() << std::endl;
			for (int i = 0; i < faces_result.size(); ++i){
				std::cout << faces_result.at(i).x << ", " << faces_result.at(i).y << std::endl;
			}
			
			// Any keypress stops this thread's capture loop.
			if (waitKey(10) >= 0){
				videoCapture.release();
				break;
			}

			Sleep(1);
		}
	}
	
	if (imshowFlag) { cvDestroyWindow(name.c_str()); }
	// Signal completion to the spawner, then end this worker thread.
	finishTaskFlag++;
	_endthread();
	return;

}
/* Release all global capture/writer resources and reset the global run
 * state; called on program exit paths. */
VOID inline GlobalExits(VOID)
{
	g_writer.release();
	g_cap.release();
	g_captureStat = capture_IDLE;
	// Stop the worker thread before tearing the HighGUI windows down.
	SecureEndThread(threadStat);
	cv::destroyAllWindows();
	g_runningONE = FALSE;
}
	// Dispose of the capture: close the device if it is open, free the
	// object, and null the pointer so unload() is safe to call repeatedly.
	void unload() {
		if(cap == NULL) {
			return;                 // nothing to do -- already unloaded
		}
		if(cap->isOpened()) {
			cap->release();
		}
		delete cap;
		cap = NULL;
	}
示例#13
0
/* Entry point: opens either a camera (no arguments; probes ids downward from
 * 3) or a video file (argv[1]), then runs capture and display on two worker
 * threads.  Uses the file-scope global `cap`. */
int main(int argc, char *argv[])
{
    start_fps();
    sleep(1);

    /********************PARAMETERS******************/  
    if(argc<2) 
    {   
        // No argument given: fall back to probing camera ids from 3 down.
        Cwarning;
        printf("Nenhum argumento adicionado ao programa\n");
        Cwarning;
        printf("Por default o programa ira selecionar o maior ID de camera\n");

        int idCamera=3;
        
        cap.open(idCamera);
        while(!cap.isOpened()) 
        {

            Cerro;
            printf("Erro ao abrir a camera id %d!\n",idCamera);
            idCamera--;
            if(idCamera==-1)
                return -1;          // no camera found at any id
            cap.release();
            cap.open(idCamera);
        }
        sleep(1);
    }
    else
    {
        // Argument given: treat argv[1] as a video file path.
        char *local_video;      
        local_video=argv[1];
        C*k;
        // NOTE(review): `C*k;` above does not compile as-is -- it looks like a
        // mangled status macro (cf. Cwarning/Cerro). Recover from the VCS.
        printf("Video ! %s ! escolhido pelo usuario\n",local_video);
        cap.open(local_video);
        if(!cap.isOpened())
        {
            Cerro;
            printf("Arquivo nao encontrado !\n");
            return -1;
        }
        sleep(1);
    }
    /************************************************/ 

    pthread_t get_img;
    pthread_t show_img;

    // One thread grabs frames from the camera/file, the other displays them.
    pthread_create(&get_img, NULL, streaming , NULL);
    pthread_create(&show_img, NULL, image_show , NULL);

    pthread_join(get_img,NULL); 
    pthread_join(show_img,NULL); 


}
示例#14
0
/* JNI binding: tear down the native VideoCapture owned by this CapVidcap.
 * PREPARE_CONTEXT is expected to bind `cntx` to the object's native handle. */
extern "C" JNIEXPORT void JNICALL Java_narl_itrc_vision_CapVidcap_implDone(
	JNIEnv* env,
	jobject thiz
){
	PREPARE_CONTEXT;

	// Release the device explicitly, then free the heap object itself.
	VideoCapture* vid = (VideoCapture*)(cntx);
	vid->release();
	delete vid;
}
示例#15
0
/* Example 2-10: play a video, show a log-polar transform of every frame and
 * (optionally, when argc > 2) write the transformed frames to argv[2].
 * argv[1]: input video path (required).
 * argv[2]: output video path; enables writing (NOWRITE = 0).
 * Returns -1 on usage/open errors, 0 otherwise. */
int main( int argc, char* argv[] )
{
	if(argc > 2)
		NOWRITE = 0;
	cout << "nowrite = " << NOWRITE << endl;
	namedWindow( "Example2_10", CV_WINDOW_AUTOSIZE );
	namedWindow( "Log_Polar", CV_WINDOW_AUTOSIZE );
	Mat bgr_frame;
	VideoCapture capture;
	// Check argc BEFORE touching argv[1]: the original combined test printed
	// argv[1] even when it was absent (argv[1] == NULL -> undefined behavior).
	if( argc < 2 ){
		help();
		return -1;
	}
	if( !capture.open( argv[1] ) ){
		help();
		cout << "Failed to open " << argv[1] << "\n" << endl;
		return -1;
	}

	double fps = capture.get(CV_CAP_PROP_FPS);
	cout << "fps = " << fps << endl;
	Size size((int)capture.get(CV_CAP_PROP_FRAME_WIDTH),
			(int)capture.get(CV_CAP_PROP_FRAME_HEIGHT));
	cout << " frame (w, h) = (" << size.width << ", " << size.height << ")" <<endl;
	VideoWriter writer;
	if(! NOWRITE)
	{ writer.open(  // On linux Will only work if you've installed ffmpeg development files correctly,
			argv[2],                               // otherwise segmentation fault.  Windows probably better.
			CV_FOURCC('M','J','P','G'),
			fps,
			size
	);
	}
	Mat logpolar_frame(size,CV_8UC3);
	Mat gray_frame(size,CV_8UC1);

	for(;;) {
		capture >> bgr_frame;
		if( bgr_frame.empty() ) break;
		imshow( "Example2_10", bgr_frame );
		cvtColor(   //We never make use of this gray image
				bgr_frame, gray_frame, CV_BGR2GRAY);
		// Legacy C-API views over the Mat headers (no pixel copy).
		IplImage lp = logpolar_frame;
		IplImage bgrf = bgr_frame;
		cvLogPolar( &bgrf, &lp,  //This is just a fun conversion the mimic's the human visual system
				cvPoint2D32f(bgr_frame.cols/2,
						bgr_frame.rows/2),
						40,
						CV_WARP_FILL_OUTLIERS );
		imshow( "Log_Polar", logpolar_frame );
		//Sigh, on linux, depending on your ffmpeg, this often won't work ...
		if(! NOWRITE)
			writer << logpolar_frame;
		char c = waitKey(10);
		if( c == 27 ) break;
	}

	capture.release();
	return 0;
}
/* HighGUI trackbar callback: reopen the global capture and reapply the
 * configured resolution/fps, then set exposure from the global `para`
 * trackbar value (offset by -10). */
static void on_trackbar(int, void*)
{
	//parameter settings
	// Reopen so the new property values take effect from a clean state.
	capture.release();
	capture.open(0);
	capture.set(CV_CAP_PROP_FRAME_WIDTH, 150);
	capture.set(CV_CAP_PROP_FRAME_HEIGHT, 100);
	capture.set(CV_CAP_PROP_FPS, 25);

	capture.set(CV_CAP_PROP_EXPOSURE, para - 10);
}
示例#17
0
// Record the default camera (or a video file) to "./video.avi" while showing
// each frame, until the stream ends or the user presses ESC.
int main(){

    // Capture source, output writer, and the working frame buffer.
    VideoCapture capture;
    VideoWriter writer;
    Mat frame;

    // Read from source
    capture.open(0);
    //capture.open("../Videos/chessboard-1.avi");

    // Check if the source was opened correctly
    if (!capture.isOpened()){
        cout << "Cannot open video device or file!" << endl;
        return -1;
    }

    // Grab one frame up front: VideoWriter must know the frame size.
    capture.read(frame);
    if (frame.empty()){
        printf("VideoCapture failed getting the first frame!\n");
        return -1;
    }

    // Open a video file for writing and check
    writer.open("./video.avi", CV_FOURCC('D','I','V','X'), 15, frame.size(), true);
    if( !writer.isOpened() ) {
        printf("VideoWriter failed to open!\n");
        return -1;
    }

    // Copy frames from the source into the file until the stream ends or
    // the user presses ESC.
    for (;;){

        capture.read(frame);
        if (frame.empty())
            break;

        // Write frame to a file
        writer.write(frame);

        // Show frame
        imshow("video", frame);

        if ((cvWaitKey(10) & 255) == 27)
            break;
    }

    // Release memory
    capture.release();
    frame.release();

    return 0;
}
示例#18
0
	/* Tear the camera down exactly once: restore v4l2 controls via the
	 * configured shell command, then release the global capture.  The
	 * `destroyed` flag makes repeated calls harmless. */
	inline void DeInitCameraDevice()
	{
		if (!destroyed)
		{
			// Best effort: report but continue if the v4l2 command fails.
			if (-1 == system(paramOffStr))
			{
				cout<<"Can't find or set v4l2-ctrl values!"<<endl;
			}
			cap.release();
			destroyed=true;
		}
	}
示例#19
0
void CameraStreamer::stopMultiCapture()
{
    VideoCapture *cap;
    for (int i = 0; i < camera_count; i++)
    {
        cap = camera_capture[i];
        if (cap->isOpened()){
            //Relase VideoCapture resource
            cap->release();
            cout << "Capture " << i << " released" << endl;
        }       
    }
}
示例#20
0
/* Live webcam viewer: shows frames from camera 0 until any key is pressed.
 * Returns -1 when the camera cannot be opened, 0 otherwise. */
int main(int argc, const char** argv)
{
	VideoCapture cap;
	cap.open(0);
	// Guard added: without it an absent camera yields empty frames and
	// imshow() throws on an empty Mat.
	if(!cap.isOpened()) return -1;
	
	while(true){
		Mat frame;
		cap >> frame;//get frame from camera
		if(frame.empty()) break; // stream ended or device unplugged
		imshow("WebCam", frame);
		if(waitKey(30) >= 0) break;
	}
	//wait for key
	cap.release();
	return 0;
}
示例#21
0
/* Probe camera indices [0, maxTested) and return how many consecutive
 * devices can be opened.  Each camera found is previewed briefly in its own
 * window before being released. */
int
countCameras(int maxTested = 5)
{
    VideoCapture cap;
    string       win_name;
    bool         cam_is_open;

    cout << "Starting search for cams..." << endl;

    for (int i = 0; i < maxTested; i++)
    {
        cam_is_open = openCaptureDevice(cap, i);
        if (cam_is_open)
        {
            cout << "Cam Found: " << i << endl;
            win_name = "test: " + to_string(i);
            namedWindow(win_name, WINDOW_AUTOSIZE);
            // Show a handful of frames as a visual sanity check.
            for (int j = 0; j < 5; j++)
            {
                Mat frame;
                cap.read(frame);
                imshow(win_name, frame);
                if (waitKey(33) == 27) break;
            }
            destroyWindow(win_name);
            cap.release();
        } else
        {
            // Devices 0..i-1 opened, so exactly i cameras exist.  (The
            // original returned i - 1: off by one, and -1 for zero cameras.)
            return i;
        }
    }

    if (cap.isOpened()) cap.release();

    // Every probed index opened: report the full count instead of 0.
    return maxTested;
}
示例#22
0
/* Open an RTSP stream and display up to ~800 frames, stopping early when the
 * user presses 'q'.  (Comments below were mojibake in the original; meanings
 * recovered from the parallel Panorama_Four_Video() function.) */
void Video() {
	VideoCapture capture;
	//capture.open("rtsp://*****:*****@192.168.1.65/Streaming/Channels/1");	// camera 1
	//capture.open("rtsp://*****:*****@192.168.1.141:554/axis-media/media.amp");		// camera 2
	//capture.open("rtsp://service:@10.153.33.11/?line=1$camera");	// alternate source
	string s = "rtsp://*****:*****@192.168.1.65/Streaming/Channels/1";
	capture.open(s);
	//capture.open(0);														// open camera by index

	cout << "-------------" << std::endl;
	if (!capture.isOpened())
	{
		cout << "Read video Failed !" << endl;
		return;
	}

	//cv::VideoCapture capture;

	Mat frame;
	//cv::namedWindow("video test");

	int frame_num = 800;

	for (int i = 0; i < frame_num - 1; ++i)
	{
		// NOTE(review): `>>` followed by read() fetches TWO frames per
		// iteration, so every other frame is dropped -- confirm intent.
		capture >> frame;
		capture.read(frame);

		// before fisheye correction
		imshow("Video", frame);

		// after fisheye correction (fisheye cam lags badly; front cam is fine)
		//frame = fisheye2pano2(frame);
		//imshow("FisheyeVideo", frame);

		if (waitKey(30) == 'q')
		{
			break;
		}
	}

	//cv::destroyWindow("video test");
	cv::destroyAllWindows();
	capture.release();

	//system("pause");
}
示例#23
0
/* Capture from camera 0, run each frame through the Panorama_Four fisheye
 * correction, and display the result for up to ~800 frames or until the
 * user presses 'q'. */
void Panorama_Four_Video() {
	VideoCapture capture;
	//capture.open("rtsp://*****:*****@192.168.1.65/Streaming/Channels/1");	// camera 1
	//capture.open("rtsp://*****:*****@192.168.1.141:554/axis-media/media.amp");		// camera 2
	//capture.open("rtsp://service:@10.153.33.11/?line=1$camera");				// alternate source
	capture.open(0);															// open camera by index

	cout << "-------------" << std::endl;
	if (!capture.isOpened())
	{
		cout << "Read video Failed !" << endl;
		return;
	}

	//cv::VideoCapture capture;

	Mat frame;
	//cv::namedWindow("video test");

	int frame_num = 800;

	for (int i = 0; i < frame_num - 1; ++i)
	{
		// NOTE(review): `>>` followed by read() fetches TWO frames per
		// iteration, so every other frame is dropped -- confirm intent.
		capture >> frame;
		capture.read(frame);

		// before fisheye correction
		//imshow("Video", frame);

		// after fisheye correction (fisheye cam lags badly; front cam is fine)
		frame = Panorama_Four(frame);
		imshow("FisheyeVideo", frame);

		if (waitKey(30) == 'q')
		{
			break;
		}
	}

	//cv::destroyWindow("video test");
	cv::destroyAllWindows();
	capture.release();

	//system("pause");
}
示例#24
0
// Topic callback: open the bottom camera when our id is announced, close it
// when a different id takes over.  Tracks state in the camOpen global.
void msgCallback(const std_msgs::String::ConstPtr& msg)
{
    camMsg = atoi(msg->data.c_str());

    const bool ours = (camMsg == camId);
    if (ours && !camOpen)
    {
        // Our turn: try to claim the device.
        if (cam.open(camMsg))
        {
            cout << "BottomCam opened successfully." << endl;
            camOpen = true;
        }
    }
    else if (!ours && camOpen)
    {
        // Someone else's turn: hand the device back.
        cam.release();
        cout << "BottomCam closed successfully." << endl;
        camOpen = false;
    }
}
示例#25
0
/*! \brief set parameter for capture, threadsave
 *
 * by function call, mutex will be locked or if already locked, call wait until mutex is unlocked
 * <br>
 * <table><tr><th colspan='3'>Possible Parameters to set:</th></tr>
 * <tr><th>name</th><th>value</th><th>description</th></tr>
 * <tr><td>capture.camera.choose</td><td>int</td><td>
 *    - value is used to deside which camera will be choosen
 *     (if they are more then one camera connected to the computer)<br>
 *    - working with active stream</td></tr>
 * <tr><td>capture.resolution.x<br>capture.resolution.width</td>
 *     <td>int</td><td>
 *     - try to set width of camera resolution</td></tr>
 * <tr><td>capture.resolution.y<br>capture.resolution.height</td>
 *     <td>int</td><td>
 *     - try to set height of camera resolution</td></tr>
 * </table>
 * <br>
 *
 * \param name parameter name
 * \param value parameter value (number or pointer)
 */
void setParameter(const char* name, int value)
{

    if(mutex_lock(mutex))
        g_Messages.push(string("setParameter</b> <font color='red'>Fehler bei mutex_lock</font>"));
    //printf("capture.setParameter: name: %s, value: %d\n", name, value);
    if(string(name) == string("capture.camera.choose"))
    {
        //  printf("Kamera Nummer: %d ausgewaehlt\n", value);
        // Switch cameras only when a stream is active and the id actually
        // changes (value == -1 forces a re-open).
        if(g_run && (value != g_cfg.cameraNr || value == -1))
        {
            printf("value: %d", value);
            // Pause the capture loop while the device is swapped.
            g_run = false;
            if(g_capture.isOpened())
                g_capture.release();
            g_capture.open(value);
            if(!g_capture.isOpened())
            {
                // New device failed: fall back to the previous camera and
                // resume before returning (mutex must be unlocked here).
                g_capture.open(g_cfg.cameraNr);
                g_run = true;
                if(mutex_unlock(mutex))
                    g_Messages.push(string("setParameter</b> <font color='red'>Fehler bei mutex_unlock</font>"));
                return;
            }
            g_run = true;
        }
        g_cfg.cameraNr = value;
        // Re-apply the configured resolution to the (possibly new) device.
        g_capture.set(CV_CAP_PROP_FRAME_WIDTH, g_cfg.width);
        g_capture.set(CV_CAP_PROP_FRAME_HEIGHT, g_cfg.height);
    }
    // Resolution values are only staged while no stream is running; they are
    // applied on the next camera (re)open above.
    if(!g_run)
    {
        if(string(name) == string("capture.resolution.x") ||
                string(name) == string("capture.resolution.width"))
            g_cfg.width = value;
        else if(string(name) == string("capture.resolution.y")||
                string(name) == string("capture.resolution.height"))
            g_cfg.height = value;
    }
    if(mutex_unlock(mutex))
        g_Messages.push(string("setParameter</b> <font color='red'>Fehler bei mutex_unlock</font>"));

}
//==============================================================================
// Interactive face-tracker demo.
// argv[1]: project data file (fname) from which the tracker model is loaded.
// argv[2]: optional video file; when absent the default camera (0) is used.
// Tracks and draws the face until the stream ends or the user presses 'q'.
int main(int argc,char** argv)
{
    //parse command line arguments
    if(parse_help(argc,argv)){cout << usage << endl; return 0;}
    if(argc < 2){cout << usage << endl; return 0;}
    
    // NOTE(review): argc is always >= 1, so this check is dead code (and
    // already subsumed by the argc < 2 test above).
    if (argc<1) {
        return 0;
    }
    string fname =string(argv[1]);
    ft_data  ftdata = load_ft_jzp(fname);
    
    //load detector model
    face_tracker tracker = load_ft<face_tracker>(string(ftdata.baseDir+"trackermodel.yaml").c_str());
    tracker.detector.baseDir = ftdata.baseDir;
    
    
    //create tracker parameters: three search window sizes, coarse to fine
    face_tracker_params p; p.robust = false;
    p.ssize.resize(3);
    p.ssize[0] = Size(21,21);
    p.ssize[1] = Size(11,11);
    p.ssize[2] = Size(5,5);
    
    //open video stream
    VideoCapture cam;
    if(argc > 2)cam.open(argv[2]); else cam.open(0);
    if(!cam.isOpened()){
        cout << "Failed opening video file." << endl
        << usage << endl; return 0;
    }
    //detect until user quits
    namedWindow("face tracker");
    // Position ratio stays < 1 until the file ends (always ~0 for cameras).
    while(cam.get(CV_CAP_PROP_POS_AVI_RATIO) < 0.999999){
        Mat im; cam >> im;
        if(tracker.track(im,p))
            tracker.draw(im);
        imshow("face tracker",im);
        int c = waitKey(10)%256;
        if(c == 'q')break;
    }
    destroyWindow("face tracker"); cam.release(); return 0;
}
示例#27
0
/**
 * Temporal smoothing for a colored picture
 *
 * Averages the first ten frames of the video `filename` per pixel and per
 * BGR channel, returning the averaged image as a background estimate.
 * NOTE(review): assumes the video yields at least 10 decodable frames and
 * that frames are 8-bit 3-channel -- confirm for arbitrary inputs.
 */
Mat temporalSmoothingColor(String filename){

    /*Results to to return*/
    Mat background;
    /*First ten pictures of the video*/
    Mat stockage[10];
    /** Video*/
    VideoCapture vc = VideoCapture(filename);

    /*Acquiring the ten pictures*/
    for(int i = 0; i< 10; i++){
        vc >> stockage[i];
    }
    background = Mat(stockage[0].size(), stockage[0].type());
	
	// h = 1/10: multiplying the channel sums by h yields the mean.
	double h = 0.1;
    double res0, res1, res2;
    int lignes = background.rows;      // rows
    int colonnes = background.cols;    // columns
    int pas = background.step;         // bytes per row
	
    /*Creation of a picture where each pixel is the average value of the matching pixel in the ten pictures*/ 
    for(int x = 0; x < lignes; x++){
        for(int y = 0; y < colonnes; y ++){

            // Sum each BGR channel across the ten frames (3 bytes per pixel).
            res0 = res1 = res2 = 0;
            for (int u = 0; u < 10; u++)
            {
				//res = res + (h*(stockage[u].at<Vec3b>(x, y)[k]));
				res0 += stockage[u].data[x*pas+y*3+0];
				res1 += stockage[u].data[x*pas+y*3+1];
				res2 += stockage[u].data[x*pas+y*3+2];
            }
            background.data[x*pas+y*3+0] = (unsigned char)(res0 * h);
            background.data[x*pas+y*3+1] = (unsigned char)(res1 * h);
            background.data[x*pas+y*3+2] = (unsigned char)(res2 * h);

        }
    }
    vc.release();
    //spatialSmoothingAvgColor(background, 1);
    return background;
}
示例#28
0
/* Interactive color-threshold tuning tool: shows the raw frame and the
 * thresholded output side by side.  Keys: 's' saves thresholds, 'e' toggles
 * mass-center detection, ESC quits (thresholds are saved on exit too). */
int main(int argc, char** argv)
{

	VideoCapture cap;

	// init() is expected to open `cap` from the command-line arguments.
	init(argc, argv, cap);

	// Restore the most recently saved threshold values.
	loadLastLine("thresholds.txt");

	setupGUI();

	Mat tmp_frame, out_frame;

	for (;;)
	{
		cap >> tmp_frame;
		if (!tmp_frame.data)
			break;
		//fineImage(tmp_frame, out_frame);
		printHSV(tmp_frame);
		detectColor(tmp_frame, out_frame);
		if (isEdgeDetect) {
			findMassCenter(out_frame, out_frame);
		}
		imshow(winIn, tmp_frame);
		imshow(winOut, out_frame);
		char keycode = waitKey(30);
		if (keycode == 's') {
			save("thresholds.txt");
		}
		if (keycode == 27)
			break;
		if (keycode == 'e') {
			isEdgeDetect = !isEdgeDetect;
		}

	}

	// Persist the final thresholds regardless of how the loop ended.
	save("thresholds.txt");

	cap.release();
	return 0;
}
// Remember the chosen video path, grab its first frame and push it to the
// UI.  Returns false (after informing the user) when the file cannot be
// opened.
bool QtTrajectoryAnalysis::loadImage(std::string path){
	videoName = path;

	VideoCapture reader;
	if (!reader.open(videoName)){
		QMessageBox::information(0, QString::fromLocal8Bit("提示"), 
			QString::fromLocal8Bit("打开视频失败!"));
		return false;
	}

	// First frame only -- this is a preview, not playback.
	cv::Mat firstFrame;
	reader >> firstFrame;

	// Hand the frame to the widget for display.
	setImage(firstFrame);

	reader.release();
	return true;
}
示例#30
0
/**
 * Temporal smoothing for a grayscale picture
 *
 * Converts the first ten frames of `filename` to grayscale and averages them
 * per pixel, returning the averaged image as a background estimate.
 * NOTE(review): assumes the video yields at least 10 decodable BGR frames --
 * confirm for arbitrary inputs.
 */
Mat temporalSmoothing(String filename){

    /*Results to to return*/
    Mat background;
    /*First ten pictures of the video*/
    Mat stockage[10];
    
    Mat temp;
    /** Video*/
    VideoCapture vc = VideoCapture(filename);

    /*Acquiring the ten pictures*/
    for(int i = 0; i< 10; i++){
        vc >> temp;
		/*Switching from a colored picture to a greyscale one*/
        cvtColor(temp, stockage[i], CV_BGR2GRAY);
    }
    background = Mat(stockage[0].size(), stockage[0].type());
	
	// h = 1/10: multiplying the pixel sums by h yields the mean.
	double h = 0.1;
    double res;
    int lignes = background.rows;      // rows
    int colonnes = background.cols;    // columns
    int pas = background.step;         // bytes per row (== cols for CV_8UC1)
	
    /*Creation of a picture where each pixel is the average value of the matching pixel in the ten pictures*/ 
    for(int x = 0; x < lignes; x++){
        for(int y = 0; y < colonnes; y ++){

            res = 0;
            for (int u = 0; u < 10; u++)
            {
				res += stockage[u].data[x*pas+y];
            }
            background.data[x*pas+y] = (unsigned char)(res * h);

        }
    }
    vc.release();
    //spatialSmoothingAvgColor(background, 1);
    return background;
}