Code example #1
File: vcrop.cpp Project: Kazz47/my_utils
VCrop::VCrop(cv::VideoCapture &capture, const float &x, const float &y, const float &size) : capture(&capture) {
    if (!capture.isOpened()) {
        std::string error_message = "Error when reading input stream";
        LOG(ERROR) << error_message;
        throw error_message;
    }

    int frame_width = capture.get(CV_CAP_PROP_FRAME_WIDTH);
    int frame_height = capture.get(CV_CAP_PROP_FRAME_HEIGHT);
    VLOG(2) << "Frame Width: " << frame_width;
    VLOG(2) << "Frame Height: " << frame_height;
    LOG_IF(FATAL, frame_width <= 0) << "Frame width is not positive.";
    LOG_IF(FATAL, frame_height <= 0) << "Frame height is not positive.";
    float diameter = sqrt(frame_width * frame_width + frame_height * frame_height);
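    // (Despite its name, "diameter" is the frame's diagonal length in pixels.)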
    cv::Point2i top_left(frame_width * x, frame_height * y);
    cv::Size2i rect_size(diameter * size, diameter * size);
    if (top_left.x + rect_size.width > frame_width || top_left.y + rect_size.height > frame_height) {
        LOG(ERROR) << "Size(" << rect_size << ") is too large for the given x(" << top_left.x << ") and y(" << top_left.y << ") coordinates.";
    }
    roi = new cv::Rect(top_left, rect_size);
    VLOG(1) << "RoI: \t" << *roi;

    frame_rate = capture.get(CV_CAP_PROP_FPS);
    if (isnan(frame_rate) || frame_rate <= 0) {
        LOG(WARNING) << "Failed to get frame rate, setting rate to 10fps.";
        frame_rate = 10;
    }
    VLOG(1) << "Frame Rate: \t" << frame_rate;
}
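For illustration only, a minimal construction call matching this signature (the input file and the parameter values are assumptions):

cv::VideoCapture capture("input.avi");
VCrop crop(capture, 0.25f, 0.25f, 0.1f); // RoI origin at 25% of the frame width/height, side length 10% of the diagonal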
Code example #2
File: main.cpp Project: sokke90/MAR-s1
void initVideoStream(cv::VideoCapture &cap)
{
	if (cap.isOpened())
		cap.release();

	cap.open(0); // open the default camera
}
Code example #3
File: mono-playback.cpp Project: fengmushu/flight
void mono_handler(const lcm_recv_buf_t *rbuf, const char* channel, const lcmt_stereo *msg, void *user) {
    // open the frame of the video specified by the message

    // check to see if the current video is the correct video file

    if (current_video_number != msg->video_number) {

        video_capture.release();

        std::string newfile = GetMonoFilename(msg->timestamp, msg->video_number);

        if (newfile.empty()) {
            return;
        }

        std::cout << "Opening file: " << newfile << std::endl;

        if (!video_capture.open(newfile)) {
            std::cerr << "Failed to open file: " << newfile << std::endl;
            return;
        }
        current_video_number = msg->video_number;
    }

    video_capture.set(CV_CAP_PROP_POS_FRAMES, msg->frame_number + frame_offset);

    cv::Mat frame;

    video_capture >> frame;

    SendImageOverLcm(lcm_, image_channel, frame);

}
Code example #4
void InitOpenCVModules() // Initialize the OpenCV data
{
	/*----------------------*/
	//
	//
	//
	//
	/*-----------------------*/
	

	if (Webcam_mode)
	{
		camera.open(Webcam_number);

		if (!camera.isOpened())  // Error-check the video source
		{
			//error in opening the video input
			cerr << "Unable to open Camera Stream" << endl;
			exit(EXIT_FAILURE);
		}


		camera >> Current_Frame; // Read data from the camera source into Current_Frame
	}

	else
	{
Code example #5
File: main.cpp Project: vadimgg/Quadcopter495
cv::Mat readFrameIndex(cv::VideoCapture cap, int frameNum){
	int idx = frameNum-1;
	cv::Mat frame;
	cap.set ( CV_CAP_PROP_POS_FRAMES , idx );
	// retrieve() alone only decodes an already-grabbed frame; read() grabs and decodes in one call
	bool success = cap.read(frame);
	if (!success)
		frame.release(); // hand back an empty Mat if seeking or decoding failed
	return frame;
}
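A possible call site for readFrameIndex, as a sketch only (the file name is hypothetical):

cv::VideoCapture clip("sample.avi");
cv::Mat tenth = readFrameIndex(clip, 10); // fetch the 10th frame (1-based numbering)
if (tenth.empty()) {
    // seeking or decoding failed
}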
Code example #6
File: subsextractor.cpp Project: egrosclaude/subs
bool SubsExtractor::open(string file)
{
	videofile = file;
	bool o = cap->open(videofile);
	StartFrame = 0;
	EndFrame = cap->get(CV_CAP_PROP_FRAME_COUNT);
	return o;
}
Code example #7
void shutterCB(int pos, void* param){
    struct timeval t;

    cap.set(CV_CAP_PROP_EXPOSURE, pos);

    //fcount=0; // Reset frame counter, so we don't have to wait for the avg to "catch" up

    std::cout << "CALLBACK !!!: pos: " << pos << "  Shutter read: " << cap.get(CV_CAP_PROP_EXPOSURE) << std::endl;
}
Code example #8
bool init_opencv()
{
	if (!capture.open(0)) {
		std::cerr << "error: capture.open() failed..." << std::endl;
		exit(-1);
	}
	capture.set(CV_CAP_PROP_FRAME_WIDTH, 640);
	capture.set(CV_CAP_PROP_FRAME_HEIGHT, 480);

	result_img.create(cv::Size(640, 480), CV_8UC3);

	return true;
}
Code example #9
	static int openVideo(cv::VideoCapture &capture, std::string filename){
		std::cout << "opening video " << filename << std::endl;
		capture.open(filename);
		if (!capture.isOpened()) {
			std::cout << "opening camera " << filename << std::endl;
			int id_webcam = std::atoi(filename.c_str());
			capture.open(id_webcam);
		}
		if (!capture.isOpened()) {
			// std::cout << "cannot open " << filename << std::endl;
			return 0;
		}
		return 1;
	}
Code example #10
void* grabFunc(void* in_data)
{
    while(running)
    {
        pthread_mutex_lock(&mutexLock);
        if(camera.isOpened())
        {
            camera.grab();
        }
        pthread_mutex_unlock(&mutexLock);
    }

    std::cout << "Grab thread exit." << std::endl;
    return NULL; // a pthread start routine must return a void*
}
Code example #11
void cameraInit(){
	/*
		cv::Mat frame;
		cap >> frame; // get a new frame from camera
		//cvtColor(frame, edges, CV_BGR2GRAY);
		//GaussianBlur(edges, edges, Size(7,7), 1.5, 1.5);
		//Canny(edges, edges, 0, 30, 3);
		imshow("edges", frame);
		cv::waitKey(30);
	}
	*/
	cap.set(CV_CAP_PROP_FRAME_WIDTH,1280);
	cap.set(CV_CAP_PROP_FRAME_HEIGHT,720);
	cap.set(CV_CAP_PROP_FPS, 30);
}
Code example #12
bool readParameters(int argc, char** argv)
{
    
    if (argc<4) {
        usage();
        return false;
    }
    
    TheInputVideo=argv[1];
    
    /*   // read input video
    if (TheInputVideo=="live") TheVideoCapturer.open(0);
    else TheVideoCapturer.open(argv[1]);
    if (!TheVideoCapturer.isOpened())
    {
        cerr<<"Could not open video"<<endl;
        return false;
    }*/
    
    //read from camera
    if (TheInputVideo=="live") TheVideoCapturer.open(0);
    else TheVideoCapturer.open(TheInputVideo);
    if (!TheVideoCapturer.isOpened())
    {
        cerr<<"Could not open video"<<endl;
        return false;
    }
    
    // read intrinsic file
    try {
        CameraParams.readFromXMLFile(argv[2]);
    } catch (std::exception &ex) {
        cout<<ex.what()<<endl;
        return false;
    }
    
    // read board file
    try {
        TheBoardConfig.readFromFile(argv[3]);
    } catch (std::exception &ex) {
        cout<<ex.what()<<endl;
        return false;
    }
    
    if(argc>4) TheMarkerSize=atof(argv[4]);
    else TheMarkerSize=1.;
    return true;
}
Code example #13
 bool read(cv::Mat &frame){
     if(capture_type==0){
         return videoCapture.read(frame);
     }else{
         return imageCapture.read(frame);
     }
 };
Code example #14
 bool isOpened(){
     if(capture_type==0){
         return videoCapture.isOpened();
     }else{
         return imageCapture.isOpened();
     }
 };
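Examples #13, #14, and #19 read like member functions of one small wrapper class that switches between a video source and an image-sequence source based on capture_type. A minimal sketch of the assumed surrounding declaration, purely for orientation (the class name CaptureWrapper and the exact type of imageCapture are assumptions, not shown in the source):

#include <opencv2/opencv.hpp>
#include <string>

class CaptureWrapper {
public:
    void open(std::string name, int type);  // body shown in example #19 below
    bool read(cv::Mat &frame);              // body shown in example #13
    bool isOpened();                        // body shown in example #14
private:
    int capture_type;                       // 0 = video/camera path, otherwise image path
    cv::VideoCapture videoCapture;
    cv::VideoCapture imageCapture;          // assumed: cv::VideoCapture can also read image sequences
};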
Code example #15
File: main.cpp Project: michiroooo/cppcv
 VideoCaptureManager()
 : cap_(0)
 {
     if(!cap_.isOpened()){
         std::cerr << "failed to open" << std::endl;
     }
 }
Code example #16
            void fx(ForParameter for_param,  cv::VideoCapture & capture_dx){
                UMat *frame_dx = new UMat[farm_size];
                UMat image_dx;
                Mat *canvas_dx = new Mat[farm_size];
                double scale_dx = 1;

                CascadeClassifier *cascade_dx = new CascadeClassifier[farm_size];

                for(int i = 0; i < farm_size; i ++)
                    cascade_dx[i].load(cascadeName);

                cout << "Video dx capturing has been started ..." << endl;
                bool frame_success;
                int count = 0;
                while(count < 220) {    
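                    // each iteration: read a batch of farm_size frames, then hand them to dx()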

                    for(int j = 0; j < farm_size; j ++)
                        frame_success = capture_dx.read(frame_dx[j]);
                    
                    if (!frame_success) break;

                    dx(frame_dx, canvas_dx, cascade_dx, scale_dx, count);
                    count ++;
                }
                launch_todo_job();
            }
Code example #17
File: Director.cpp Project: dm4/dip-final
bool Director::initCamera()
{
#if SIMULATOR == 1
	if (!capture.isOpened()) {
		cerr << "Error: Can not open camera!" << endl;
		return false;
	}
#else
	fc2Error error = fc2CreateContext(&context);
	if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2CreateContext:" << error << endl;
        return false;
    }        

	unsigned int numCameras = 0;
    error = fc2GetNumOfCameras(context, &numCameras);
    if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2GetNumOfCameras: " << error << endl;
        return false;
    }
    if (numCameras == 0) {
        // No cameras detected
		cerr << "No cameras detected." << endl;
        return false;
    }

    // Get the 0th camera
	fc2PGRGuid guid;
    error = fc2GetCameraFromIndex(context, 0, &guid);
    if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2GetCameraFromIndex: " << error << endl;
        return false;
    }    

    error = fc2Connect(context, &guid);
    if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2Connect: " << error << endl;
        return false;
    }

    error = fc2StartCapture(context);
    if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2StartCapture: " << error << endl;
        return false;
    }

	error = fc2CreateImage(&rawImage);
	if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2CreateImage: " << error << endl;
		return false;
	}
	error = fc2CreateImage(&image);
	if (error != FC2_ERROR_OK) {
		cerr << "Error in fc2CreateImage: " << error << endl;
		return false;
	}
#endif

	return true;
}
Code example #18
File: main.cpp Project: peterborkuti/opencv
void openCamera(cv::VideoCapture& cap, int argc, const char** argv) {
	cv::CommandLineParser parser(argc, argv, keys);
	int camNum = parser.get<int>("1");

	cap.open(camNum);

	if (!cap.isOpened()) {
		//help();
		std::cout << "***Could not initialize capturing...***\n";
		std::cout << "Current parameter's value: \n";
		exit (-1);
	}

	cap.set(CV_CAP_PROP_FRAME_WIDTH, 160);
	cap.set(CV_CAP_PROP_FRAME_HEIGHT, 120);
}
Code example #19
 void open(string name, int type){
    capture_type = type; 
    if(capture_type==0){
         videoCapture.open(name);
    }else{
        imageCapture.open(name);
    }
 };
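Putting example #19 together with the read() and isOpened() methods from examples #13 and #14, a possible call sequence (the class name CaptureWrapper and the file name are assumptions for illustration):

CaptureWrapper source;
source.open("input.mp4", 0); // type 0 selects the videoCapture path
cv::Mat frame;
while (source.isOpened() && source.read(frame)) {
    // process frame here
}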
Code example #20
void takePictureBase(){
	//printf("picture base\n");
	cv::Mat image2(720, 1280, CV_8UC3);
	cap.read(image2);
	image2.copyTo(baseFrame);
	//cap.read(image2);
	//image2.copyTo(baseFrame);
}
Code example #21
void OpenCVTemplateApp::update()
{
    if(currentState == PLAY) {
        frameIndex = video.get(cv::CAP_PROP_POS_FRAMES);
        frameIndex += frameSpeed;
        if(frameIndex >= video.get(cv::CAP_PROP_FRAME_COUNT)-1) {
            frameIndex = 0;
        }
        video.set(cv::CAP_PROP_POS_FRAMES,frameIndex);
        video.read(frame);
        if(isGrayScale) {
            cv::cvtColor(frame, frame, cv::COLOR_BGR2GRAY);
            cv::goodFeaturesToTrack(frame, featurePoints, nbOfFeaturePoints, 0.01, 10, cv::Mat(), 3, 0, 0.04);
        }
        frameTexture = gl::Texture::create(fromOcv(frame));
    }
}
Code example #22
void takePictureLaser(){
	//printf("picture laser\n");
	cv::Mat image(720, 1280, CV_8UC3);
	cap.read(image);
	image.copyTo(laserFrame);
	//cap.read(image);
	//image.copyTo(laserFrame);
}
Code example #23
/*
 * Skip n frames, returning the (n+1)-th frame via *image. Returns false if not
 * enough frames are available.
 */
bool skipNFrames(cv::VideoCapture capture, int n, cv::Mat *image) {
    for(int i = 0; i < n + 1; i++) {
        if(!capture.read(*image)) {
            return false;
        }
    }

    return true;
}
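A short usage sketch (the clip name is hypothetical): skip five frames and keep the sixth.

cv::VideoCapture cap("clip.mp4");
cv::Mat img;
if (skipNFrames(cap, 5, &img)) {
    // img now holds the sixth frame of the clip
}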
Code example #24
std::vector<Mat> GetFrames(cv::VideoCapture &cap)
{
	std::vector<Mat> ansvect;
	for(int i=0;;i++)
	{
		//std::cout << i <<"\n";
		cv::Mat frame;
		if (int(cap.get(CV_CAP_PROP_POS_FRAMES)) == int(cap.get(CV_CAP_PROP_FRAME_COUNT)))
			break;
		//std::cout << cap.get(CV_CAP_PROP_POS_FRAMES) <<"\t"<<cap.get(CV_CAP_PROP_FRAME_COUNT) <<"\n";
		if (!cap.read(frame))             
			break;
		ansvect.push_back(frame);
		//cv::imshow("window", frame);
		//char key = cvWaitKey(0);
	}
	return ansvect;
}
Code example #25
File: camera.cpp Project: LearningProcess/PEWSHOOM
//-----------------------------------------------------------------------------
void ShutdownCamera (const char sCameraImg[])
{
/*capDriverDisconnect(hgCapWnd);
lprintf("\nDelete temporary file %s\n", sCameraImg);
unlink(sCameraImg);*/
	
	cvDestroyAllWindows();
	cam.release();
}
Code example #26
File: camExample.cpp Project: Overdr0ne/motld
void Init(cv::VideoCapture& capture)
{
  if(!capture.isOpened()){
    std::cout << "error starting video capture" << std::endl;
    exit(0);
  }
  //propose a resolution
  capture.set(CV_CAP_PROP_FRAME_WIDTH, RESOLUTION_X);
  capture.set(CV_CAP_PROP_FRAME_HEIGHT, RESOLUTION_Y);
  //get the actual (supported) resolution
  ivWidth = capture.get(CV_CAP_PROP_FRAME_WIDTH);
  ivHeight = capture.get(CV_CAP_PROP_FRAME_HEIGHT);
  std::cout << "camera/video resolution: " << ivWidth << "x" << ivHeight << std::endl;

  cv::namedWindow("MOCTLD", 0); //CV_WINDOW_AUTOSIZE );
  // cv::resizeWindow("MOCTLD", ivWidth, ivHeight);
  cv::setMouseCallback("MOCTLD", MouseHandler);
}
Code example #27
void VirtualKinect::showVideoInfo(cv::VideoCapture& video)
{
#if (defined WIN32 || defined _WIN32 || defined WINCE) // for Windows
    SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE),FOREGROUND_INTENSITY | FOREGROUND_GREEN);
#endif

    std::cout << cv::format("frame count:%.0f, size (%.0f,%.0f), fps:%.2f, fourcc:",
        video.get(CV_CAP_PROP_FRAME_COUNT),
        video.get(CV_CAP_PROP_FRAME_WIDTH),
        video.get(CV_CAP_PROP_FRAME_HEIGHT),
        video.get(CV_CAP_PROP_FPS));

    int ex = static_cast<int>(video.get(CV_CAP_PROP_FOURCC));     // Get Codec Type- Int form
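    // Unpack the four FOURCC character bytes into a printable, null-terminated string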
    char EXT[] = {(char)(ex & 0XFF) , (char)((ex & 0XFF00) >> 8),(char)((ex & 0XFF0000) >> 16),(char)((ex & 0XFF000000) >> 24), 0};
    std::cout << EXT << std::endl << std::endl;

#if (defined WIN32 || defined _WIN32 || defined WINCE) // for Windows
    SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE),FOREGROUND_INTENSITY | FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE);
#endif
}
Code example #28
File: main.cpp Project: quiquealon/arquitectura
int main() {
    if (!camera.isOpened()) return 1;
	int keyCheck = 0;
	resetTimer();
	MouseGlove.setCenterHSV(140,161,145);
	MouseGlove.setLeftHSV(96,68,118);
	MouseGlove.setRightHSV(38,205,246);
	MouseGlove.setScrollHSV(63,144,204);
	MouseGlove.setCenterColorThreshold(45);
	MouseGlove.setLeftColorThreshold(0);
	MouseGlove.setRightColorThreshold(25);
	MouseGlove.setScrollColorThreshold(0);
	MouseGlove.setScrollAnchorYCoordinate(240);
	MouseGlove.setMinArea(100);
	leftClickStatus = false;
	rightClickStatus = false;
    while(cv::waitKey(10) != 13) {
		if (!camera.read(image)) return 1;
		cv::flip(image,image,1);
		MouseGlove.processCenterMarker(image);
		MouseGlove.processLeftMarker(image);
		MouseGlove.processRightMarker(image);
		MouseGlove.processScrollMarker(image);
		if (getTime() > 0.3) {
			coordenadasMouse = MouseGlove.getCenterMarkerCoordinates();
			MouseGlove.calibrateCoordinates(coordenadasMouse);
		}
		if (MouseGlove.mouseDetected()) {
			MouseGlove.moveMouse(coordenadasMouse);
		}
		//if (MouseGlove.getLeftClickStatus() != leftClickStatus)
		//	toggleLeftClick();
		//if (MouseGlove.getRightClickStatus() != rightClickStatus)
		//	toggleRightClick();
	//	if (MouseGlove.scrollDetected()) {
		//	MouseGlove.scroll(coordenadasMouse,MouseGlove.getScrollSpeed());
	//	}
		MouseGlove.displayMouseStatus(image);
		cv::imshow("Mouse",image);
    }
}
Code example #29
File: main.cpp Project: Goldenchest/MouseGlove
int main() {
    if (!camera.isOpened()) return 1;
	int keyCheck = 0;
	resetTimer();
	MouseGlove.setCenterHSV(104,238,203);
	MouseGlove.setLeftHSV(150,150,232);
	MouseGlove.setRightHSV(15,205,246);
	MouseGlove.setScrollHSV(63,144,204);
	MouseGlove.setCenterColorThreshold(50);
	MouseGlove.setLeftColorThreshold(35);
	MouseGlove.setRightColorThreshold(25);
	MouseGlove.setScrollColorThreshold(30);
	MouseGlove.setScrollAnchorYCoordinate(240);
	MouseGlove.setMinArea(100);
	leftClickStatus = false;
	rightClickStatus = false;
    while(cv::waitKey(10) != 13) {
		if (!camera.read(image)) return 1;
		cv::flip(image,image,1);
		MouseGlove.processCenterMarker(image);
		MouseGlove.processLeftMarker(image);
		MouseGlove.processRightMarker(image);
		MouseGlove.processScrollMarker(image);
		if (getTime() > 0.3) {
			mouseCoordinates = MouseGlove.getCenterMarkerCoordinates();
			MouseGlove.calibrateCoordinates(mouseCoordinates);
		}
		if (MouseGlove.mouseDetected()) {
			MouseGlove.moveMouse(mouseCoordinates);
		}
		if (MouseGlove.getLeftClickStatus() != leftClickStatus)
			toggleLeftClick();
		if (MouseGlove.getRightClickStatus() != rightClickStatus)
			toggleRightClick();
		if (MouseGlove.scrollDetected()) {
			MouseGlove.scroll(mouseCoordinates,MouseGlove.getScrollSpeed());
		}
		MouseGlove.displayMouseStatus(image);
		cv::imshow("Mouse Glove",image);
    }
}
Code example #30
File: initParams.cpp Project: Ragesam/Thesis
void initParams::readAndSet(cv::VideoCapture& cap, char* input){
	
	cv::FileStorage fs(input, cv::FileStorage::READ);
	if(fs.isOpened()){
		fs.open(input, cv::FileStorage::READ);	
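		// (note: the FileStorage constructor above has already opened the file, so this open() is redundant)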
		fs["frameWidth"] 	>> frameWidth;
		fs["frameHeight"] 	>> frameHeight;
		fs["fps"]		>> fps;
		fs["camId"]		>> camId;
		fs["generateData"]	>> genData;
		fs["visualizeData"]	>> visualize;
		fs["writeVideo"]	>> writeVideo;
		fs["generatePerfData"]	>> genPerfData;
		fs["videoFile"]		>> videoFile;
		fs.release();
		cap.open(videoFile);
		if(!cap.isOpened()){
			std::cerr << "Could not access video file: " << videoFile << std::endl;
			exit(EXIT_FAILURE);
		}
	}