Example #1
/**
 * Callback function for the rgb image topic. Appends each received image
 * message to the rgb video, using the rgb video writer.
 *
 * @param msg a ROS image message.
 */
void rgbCallback(const sensor_msgs::ImageConstPtr &msg)
{
    cv_bridge::CvImagePtr cv_ptr;

    // Copies the image data to cv_ptr and handles exceptions
    try
    {
        cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception &e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    // Check for invalid image
    if (!(cv_ptr->image.rows) || !(cv_ptr->image.cols))
    {
        ROS_ERROR("cv_ptr error: invalid image frame received");
        exit(-1);
    }

    // Opens the rgb video writer if it's not opened yet.
    if (!is_open_rgb)
    {
        std::string rgb_name = ros::package::getPath("vision") + "/data/rgb_video.avi";
        rgb_writer.open(rgb_name.c_str(), CV_FOURCC('P','I','M','1'),
                        25, cv::Size(cv_ptr->image.cols, cv_ptr->image.rows),true);
        if (!rgb_writer.isOpened()) ROS_ERROR("Error! Could not open rgb video writer!");
        else is_open_rgb = true;
    }
    rgb_writer << cv_ptr->image; // Saves the rgb image on the rgb video.
}
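
For reference, a minimal sketch (not from the original project) of how rgbCallback above might be wired into a node; the node and topic names are assumptions, while rgb_writer and is_open_rgb are the globals the callback relies on.

#include <ros/ros.h>
#include <opencv2/highgui/highgui.hpp>

cv::VideoWriter rgb_writer;     // writer used by rgbCallback above
bool is_open_rgb = false;       // set once the writer has been opened

int main(int argc, char **argv)
{
    ros::init(argc, argv, "rgb_recorder");   // node name is an assumption
    ros::NodeHandle nh;
    // topic name is an assumption; queue size 1 drops frames rather than lagging
    ros::Subscriber sub = nh.subscribe("camera/rgb/image_raw", 1, rgbCallback);
    ros::spin();
    if (rgb_writer.isOpened())
        rgb_writer.release();                // flush the video file on shutdown
    return 0;
}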
Example #2
	~DetectFromVideo()
	{
		if (vWriter.isOpened())
		{
			vWriter.release();
		}
	}
Example #3
            bool recordColor(const QString& file, int fourcc)
            {
                //const int fourcc = CV_FOURCC('L', 'A', 'G', 'S');
                //const int fourcc = CV_FOURCC('X', '2', '6', '4');
                //const int fourcc = CV_FOURCC('Z', 'L', 'I', 'B');
                //const int fourcc = CV_FOURCC('M', 'J', 'P', 'G');
                //const int fourcc = CV_FOURCC_PROMPT;
                //const int fourcc = CV_FOURCC('H', 'F', 'Y', 'U');
                //int fourcc = CV_FOURCC('H', 'F', 'Y', 'U');

                colorFile = (file + ".avi").toStdString();

                colorWriter.open(
                    colorFile,
                    fourcc,
                    30.0,
                    cv::Size(ColorFrame::WIDTH, ColorFrame::HEIGHT),
                    true
                );

                if (colorWriter.isOpened()) {
                    colorBuffer.create(cv::Size(ColorFrame::WIDTH, ColorFrame::HEIGHT), CV_8UC3);
                    colorFrame = new ColorFrame();
                    return true;
                }
                colorFile.clear();
                return false;
            }
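
The commented-out codecs above suggest the FourCC is meant to be swappable; here is a self-contained sketch (file name, frame size and codec choice are assumptions) of trying a lossless codec first and falling back to MJPG.

#include <opencv2/opencv.hpp>

int main()
{
    const cv::Size size(1920, 1080);          // assumed color frame size
    cv::VideoWriter writer;
    // prefer lossless HuffYUV, fall back to the widely available MJPG
    if (!writer.open("color.avi", CV_FOURCC('H','F','Y','U'), 30.0, size, true))
        writer.open("color.avi", CV_FOURCC('M','J','P','G'), 30.0, size, true);
    return writer.isOpened() ? 0 : 1;
}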
Example #4
 ~StreamRecorder()
 {
     colorWriter.release();
     depthWriter.release();
     skeletonWriter.close();
     if (colorFrame != nullptr) delete colorFrame;
     if (depthFrame != nullptr) delete depthFrame;
     if (skeletonFrame != nullptr) delete skeletonFrame;
 }
Example #5
	void operator()(const cv::Mat& image,bool record)
	{
		// no image no fun
		if (image.empty()) return;

		int cF = currentFrame ? 1 : 0;
		int pF = currentFrame ? 0 : 1;

		// resize the image
		cv::resize(image,frames[cF],cv::Size(image.cols*scale,image.rows*scale),0,0,cv::INTER_LINEAR);

		std::cout << "Current: " << cF << " Previous: " << pF << std::endl;
		std::cout << "Image " << image.size() << " c:" << image.channels() << " > " << frames[cF].size() << std::endl;

		if (record && !output.isOpened())
		{
			// fourcc
			int fourCC = FourCC<'X','V','I','D'>::value;
			// frame rate of the output video; the code writes at 25 fps
			if (output.open("flow.avi",fourCC,25,frames[cF].size(),false))
			{
				std::cout << "capture file opened" << std::endl;
			}
		}

		// make a copy for the initial frame
		if (frames[pF].empty())
			frames[cF].copyTo(frames[pF]);

		if (!flow.empty())
			flow = cv::Mat::zeros(flow.size(),flow.type());

		// calculate dense optical flow
		cv::calcOpticalFlowFarneback(frames[pF],frames[cF],flow,.5,2,8,3,7,1.5,0);

		// draw into a copy so the frame used for the flow computation stays untouched
		cv::Mat outImg = frames[cF].clone();

		drawOptFlowMap(flow,outImg,8,cv::Scalar::all(255));

		cv::imshow("Flow",outImg);

		// flip the buffers
		currentFrame = !currentFrame;
		
		// record the frame
		if (record && output.isOpened())
			output.write(outImg);
		
	}
Example #6
void Tracker2D::writeToVideo(cv::VideoWriter & writer)
{

	cv::Mat dispImage;
	drawTrackers(dispImage);
	writer.write(dispImage);
}
Example #7
            bool recordDepth(const QString& file, int fourcc)
            {
                depthFile = (file + ".avi").toStdString();

                depthWriter.open(
                    depthFile,
                    fourcc,
                    30.0,
                    cv::Size(DepthFrame::WIDTH, DepthFrame::HEIGHT),
                    true
                );

                if (depthWriter.isOpened()) {
                    depthBuffer.create(cv::Size(DepthFrame::WIDTH, DepthFrame::HEIGHT), CV_8UC3);
                    depthFrame = new DepthFrame();
                    return true;
                }
                depthFile.clear();
                return false;
            }
Example #8
int main ( int argc,char** argv )
{
  ros::init ( argc,argv,"MultiLaneDetectionNode" );
  ros::NodeHandle nh_;
  cv::namedWindow(OPENCV_WINDOW);
  if ( argc>5 ) {
    time_t rawTime;
    struct tm * timeInfo;
    char buffer[80];
    std::time ( &rawTime );
    timeInfo = std::localtime ( &rawTime );
    std::strftime ( buffer,80,"%F-%H-%M-%S",timeInfo );
    std::string time ( buffer );
    std::string suffix = "_"+time+".avi";
    vidRec.open ( argv[5]+suffix,CV_FOURCC ( 'M','P','4','V' ),20,cv::Size ( 640,480 ) );
  }

  caffe::Caffe::SetDevice(0);
  caffe::Caffe::set_mode(caffe::Caffe::GPU);
  caffe::Caffe::set_phase(caffe::Caffe::TEST);
  caffe_net = new caffe::Net<float>(argv[1]);
  caffe_net->CopyTrainedLayersFrom(argv[2]);
  mean_image = cv::imread(argv[3]);

  std::vector<cv::Mat> splitChannels_mean;
  cv::split(mean_image,splitChannels_mean);
  float *mean_data = mean.mutable_cpu_data();
  int frameSize = mean_image.cols*mean_image.rows;
  for(int i = 0;i<3;i++){
    std::copy(splitChannels_mean[i].data, splitChannels_mean[i].data+frameSize, (float*)mean_data+frameSize*i);
  }

  ros::Subscriber sub = nh_.subscribe ( argv[4],1,caffeCallback );
  delay_pub = nh_.advertise<lane_detection::Delay> ( "multilane_detection/Delay",100 );
  ros::spin();
  cv::destroyWindow ( OPENCV_WINDOW );
  delete caffe_net;
  return 0;
}
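
The timestamped output-file name built above is the reusable part of this snippet; a standalone sketch of the same pattern (the base name "lanes" and the 640x480 size are assumptions).

#include <ctime>
#include <string>
#include <opencv2/opencv.hpp>

int main()
{
    time_t rawTime;
    char buffer[80];
    std::time(&rawTime);
    std::strftime(buffer, 80, "%F-%H-%M-%S", std::localtime(&rawTime));
    std::string name = std::string("lanes") + "_" + buffer + ".avi";

    cv::VideoWriter vidRec;
    vidRec.open(name, CV_FOURCC('M','P','4','V'), 20, cv::Size(640, 480));
    return vidRec.isOpened() ? 0 : 1;
}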
Example #9
	/**
		Write the captured frame to the video file.
	*/
	bool write(){
		if( videoWriter.isOpened()){
			cv::Size size = cv::Size(glutGet(GLUT_WINDOW_WIDTH),glutGet( GLUT_WINDOW_HEIGHT));
			if(size.width == captureImage.cols && size.height == captureImage.rows){
#ifndef USE_TEXTURE_2D
				glReadPixels(0,0, captureImage.cols,captureImage.rows,GL_RGB,GL_UNSIGNED_BYTE,captureImage.data);
#else
				glCopyTexImage2D( GL_TEXTURE_2D,0,GL_RGB,0,0,captureImage.cols,captureImage.rows,0);
				glGetTexImage(GL_TEXTURE_2D,0,GL_RGB,GL_UNSIGNED_BYTE,captureImage.data);
#endif
			}
			else{	// the window has been resized
				size.width -= size.width % 4;	// round down to a multiple of 4
				cv::Mat temp(size,CV_8UC3);
#ifndef USE_TEXTURE_2D
				glReadPixels(0,0, size.width,size.height,GL_RGB,GL_UNSIGNED_BYTE,temp.data);
#else
				glCopyTexImage2D( GL_TEXTURE_2D,0,GL_RGB,0,0,size.width,size.height,0);
				glGetTexImage(GL_TEXTURE_2D,0,GL_RGB,GL_UNSIGNED_BYTE,temp.data);
#endif
				cv::resize(temp,captureImage,captureImage.size(),0.0,0.0,interpolation);
			}
			cvtColor(captureImage,captureImage,CV_RGB2BGR);   
			flip(captureImage,captureImage,0);
			videoWriter << captureImage;
			//char key = cv::waitKey(1);
			//std::ostringstream oss;
			//oss << name << count << ".bmp";
			//if(key == 's'){
			//	cv::imwrite(oss.str(), captureImage);
			//	count++;
			//}
			return true;

		}
		else
			return false;
	}
Example #10
int main(int argc,char **argv)
{
    try
    {
        //parse arguments
        if (readArguments(argc,argv)==false) return 0;
        TheBoardConfig.readFromFile(TheBoardConfigFile);
        //read from camera or from file
        if (TheInputVideo=="live") {
            TheVideoCapturer.open(0);
            waitTime=10;
        }
        else TheVideoCapturer.open(TheInputVideo);
        //check video is open
        if (!TheVideoCapturer.isOpened()) {
            cerr<<"Could not open video"<<endl;
            return -1;

        }

        //read first image to get the dimensions
        TheVideoCapturer>>TheInputImage;

        //Open outputvideo
        if ( TheOutVideoFilePath!="")
            VWriter.open(TheOutVideoFilePath,CV_FOURCC('M','J','P','G'),15,TheInputImage.size());

        //read camera parameters if passed
        if (TheIntrinsicFile!="") {
            TheCameraParameters.readFromXMLFile(TheIntrinsicFile);
            TheCameraParameters.resize(TheInputImage.size());
        }

        //Create gui

        cv::namedWindow("thres",1);
        cv::namedWindow("in",1);
        TheBoardDetector.setParams(TheBoardConfig,TheCameraParameters,TheMarkerSize);
        TheBoardDetector.getMarkerDetector().getThresholdParams( ThresParam1,ThresParam2);
        TheBoardDetector.getMarkerDetector().enableErosion(true);//for chessboards
        iThresParam1=ThresParam1;
        iThresParam2=ThresParam2;
        cv::createTrackbar("ThresParam1", "in",&iThresParam1, 13, cvTackBarEvents);
        cv::createTrackbar("ThresParam2", "in",&iThresParam2, 13, cvTackBarEvents);
        char key=0;
        int index=0;
        //capture until press ESC or until the end of the video
        while ( key!=27 && TheVideoCapturer.grab())
        {
            TheVideoCapturer.retrieve( TheInputImage);
            TheInputImage.copyTo(TheInputImageCopy);
            index++; //number of images captured
            double tick = (double)getTickCount();//for checking the speed
            //Detection of the board
            float probDetect=TheBoardDetector.detect(TheInputImage);
            //check the speed by calculating the mean speed of all iterations
            AvrgTime.first+=((double)getTickCount()-tick)/getTickFrequency();
            AvrgTime.second++;
            cout<<"Time detection="<<1000*AvrgTime.first/AvrgTime.second<<" milliseconds"<<endl;
            //print marker borders
            for (unsigned int i=0;i<TheBoardDetector.getDetectedMarkers().size();i++)
                TheBoardDetector.getDetectedMarkers()[i].draw(TheInputImageCopy,Scalar(0,0,255),1);

            //print board
             if (TheCameraParameters.isValid()) {
                if ( probDetect>0.2)   {
                    CvDrawingUtils::draw3dAxis( TheInputImageCopy,TheBoardDetector.getDetectedBoard(),TheCameraParameters);
                    //draw3dBoardCube( TheInputImageCopy,TheBoardDetected,TheIntriscCameraMatrix,TheDistorsionCameraParams);
                }
            }
            //DONE! Easy, right?

            //show input with augmented information and  the thresholded image
            cv::imshow("in",TheInputImageCopy);
            cv::imshow("thres",TheBoardDetector.getMarkerDetector().getThresholdedImage());
            //write to video if required
            if (  TheOutVideoFilePath!="") {
                //create a composed image showing the thresholded result;
                //first create a small version of the thresholded image
                cv::Mat smallThres;
                cv::resize( TheBoardDetector.getMarkerDetector().getThresholdedImage(),smallThres,cvSize(TheInputImageCopy.cols/3,TheInputImageCopy.rows/3));
                cv::Mat small3C;
                cv::cvtColor(smallThres,small3C,CV_GRAY2BGR);
                cv::Mat roi=TheInputImageCopy(cv::Rect(0,0,TheInputImageCopy.cols/3,TheInputImageCopy.rows/3));
                small3C.copyTo(roi);
                VWriter<<TheInputImageCopy;
// 			 cv::imshow("TheInputImageCopy",TheInputImageCopy);

            }

            key=cv::waitKey(waitTime);//wait for key to be pressed
            processKey(key);
        }


    } catch (std::exception &ex)
    {
        cout<<"Exception :"<<ex.what()<<endl;
    }

}
Example #11
	~GLCapture(){
		videoWriter.release();
	}
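
A minimal sketch (the helper name and codec are assumptions) of how the GLCapture writer and buffer released above might be opened, rounding the width down to a multiple of 4 as write() in Example #9 does.

#include <string>
#include <opencv2/opencv.hpp>

bool openGLCaptureWriter(cv::VideoWriter &videoWriter, cv::Mat &captureImage,
                         const std::string &filename, int windowWidth,
                         int windowHeight, double fps)
{
    int width = windowWidth - windowWidth % 4;          // keep rows 4-byte aligned for glReadPixels
    captureImage.create(windowHeight, width, CV_8UC3);  // buffer that write() fills from OpenGL
    return videoWriter.open(filename, CV_FOURCC('M','J','P','G'), fps,
                            cv::Size(width, windowHeight), true);
}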
Example #12
File: main.cpp  Project: robotology/yarp
    void run() override
    {
        buf.lock();
        unsigned int sz=buf.size(); //!!! access to size must be protected: problem spotted with Linux stl
        buf.unlock();

        // every 10 seconds a writeToDisk command is issued straightaway
        bool writeToDisk=false;
        double curTime=Time::now();
        if ((curTime-oldTime>10.0) || closing)
        {
            writeToDisk=sz>0;
            oldTime=curTime;
        }

        // it performs the writeToDisk on command or as soon as
        // the queue size is greater than the given threshold
        if ((sz>blockSize) || writeToDisk)
        {
        #ifdef ADD_VIDEO
            // extract images parameters just once
            if (doImgParamsExtraction && (sz>1))
            {
                buf.lock();
                DumpItem itemFront=buf.front();
                DumpItem itemEnd=buf.back();
                buf.unlock();

                int fps;
                int frameW=((IplImage*)itemEnd.obj->getPtr())->width;
                int frameH=((IplImage*)itemEnd.obj->getPtr())->height;

                t0=itemFront.timeStamp.getStamp();
                double dt=itemEnd.timeStamp.getStamp()-t0;
                if (dt<=0.0)
                    fps=25; // default
                else
                    fps=int(double(sz-1)/dt);

                videoWriter.open(videoFile.c_str(),CV_FOURCC('H','F','Y','U'),
                                 fps,cvSize(frameW,frameH),true);

                doImgParamsExtraction=false;
                doSaveFrame=true;
            }
        #endif

            // save to disk
            for (unsigned int i=0; i<sz; i++)
            {
                buf.lock();
                DumpItem item=buf.front();
                buf.pop_front();
                buf.unlock();

                fdata << item.seqNumber << ' ' << item.timeStamp.getString() << ' ';
                if (saveData)
                    fdata << item.obj->toFile(dirName,counter++) << endl;
                else
                {
                    ostringstream frame;
                    frame << "frame_" << setw(8) << setfill('0') << counter++;
                    fdata << frame.str() << endl;
                }

            #ifdef ADD_VIDEO
                if (doSaveFrame)
                {
                    cv::Mat img=cv::cvarrToMat((IplImage*)item.obj->getPtr());
                    videoWriter<<img;

                    // write the timecode of the frame
                    int dt=(int)(1000.0*(item.timeStamp.getStamp()-t0));
                    ftimecodes << dt << endl;
                }
            #endif

                delete item.obj;
            }

            cumulSize+=sz;
            yInfo() << sz << " items stored [cumul #: " << cumulSize << "]";
        }
    }
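
For clarity, the frame-rate estimate above is simply fps ≈ (sz - 1) / dt over the buffered frames' timestamps; a standalone illustration with made-up numbers.

#include <cstdio>

int main()
{
    unsigned int sz = 251;   // frames currently queued (made-up)
    double t0 = 100.0;       // timestamp of the first queued frame [s]
    double t1 = 110.0;       // timestamp of the last queued frame  [s]
    double dt = t1 - t0;
    int fps = (dt <= 0.0) ? 25                       // same default as above
                          : int(double(sz - 1) / dt);
    std::printf("estimated fps = %d\n", fps);        // 250 intervals over 10 s -> 25 fps
    return 0;
}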
Example #13
void imgproc(const uint8_t *image, int width, int height)
{
  cv::Mat img(height, width, CV_8UC1, const_cast<uint8_t*>(image), width);
  imshow("Original", img);
  cv::waitKey(1);
  return;  // NOTE: this early return skips all of the processing below

  cv::Mat src = img.clone();
  cv::Mat color_src(height, width, CV_8UC3);
  cvtColor(src, color_src, CV_GRAY2RGB);

  // Image processing starts here
  GaussianBlur(src, src, cv::Size(3,3), 0);
  adaptiveThreshold(src, src, 255, cv::ADAPTIVE_THRESH_GAUSSIAN_C, cv::THRESH_BINARY_INV, 5, 3);
  //equalizeHist(src, src);
  // TODO: Can think about using multiple thresholds and choosing one where
  // we can detect a pattern
  //threshold(src, src, 100, 255, cv::THRESH_BINARY_INV);

  imshow("Thresholded", src);

  std::vector<std::vector<cv::Point> > contours;

  findContours(src, contours, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE);
  //printf("Num contours: %lu\n", contours.size());

  std::vector<double> contour_area, contour_arclength;
  contour_area.resize(contours.size());
  contour_arclength.resize(contours.size());
  std::vector<unsigned int> circle_index;
  for(unsigned int idx = 0; idx < contours.size(); idx++)
  {
    if(contours[idx].size() > 25)
    {
      cv::Mat contour(contours[idx]);
      contour_area[idx] = contourArea(contour);
      if(contour_area[idx] > 50)
      {
        contour_arclength[idx] = arcLength(contour,true);
        float q = 4*M_PI*contour_area[idx] /
            (contour_arclength[idx]*contour_arclength[idx]);
        if(q > 0.8f)
        {
          circle_index.push_back(idx);
          //printf("isoperimetric quotient: %f\n", q);
          //Scalar color( rand()&255, rand()&255, rand()&255 );
          //drawContours(contours_dark, contours, idx, color, 1, 8);
        }
      }
    }
  }
  std::list<Circle> circles;
  for(unsigned int i = 0; i < circle_index.size(); i++)
  {
    Circle c;
    cv::Moments moment = moments(contours[circle_index[i]]);
    float inv_m00 = 1./moment.m00;
    c.center = cv::Point2f(moment.m10*inv_m00, moment.m01*inv_m00);
    c.radius = (sqrtf(contour_area[circle_index[i]]/M_PI) + contour_arclength[circle_index[i]]/(2*M_PI))/2.0f;
    circles.push_back(c);
  }

  // Get the circles with centers close to each other
  std::vector<std::list<Circle> > filtered_circles;
  std::list<Circle>::iterator it = circles.begin();
  unsigned int max_length = 0;
  while(it != circles.end())
  {
    std::list<Circle> c;
    c.push_back(*it);

    cv::Point c1 = it->center;

    std::list<Circle>::iterator it2 = it;
    it2++;
    while(it2 != circles.end())
    {
      cv::Point c2 = it2->center;
      std::list<Circle>::iterator it3 = it2;
      it2++;
      if(hypotf(c2.x - c1.x, c2.y - c1.y) < 10)
      {
        c.push_back(*it3);
        circles.erase(it3);
      }
    }
    unsigned int length_c = c.size();
    if(length_c > 1 && length_c > max_length)
    {
      max_length = length_c;
      filtered_circles.push_back(c);
    }

    it2 = it;
    it++;
    circles.erase(it2);
  }

  if(filtered_circles.size() > 0)
  {
    Circle target_circle;
    target_circle.radius = std::numeric_limits<float>::max();

    for(it = filtered_circles.back().begin(); it != filtered_circles.back().end(); it++)
    {
      //printf("circle: c: %f, %f, r: %f\n", it->center.x, it->center.y, it->radius);
      if(it->radius < target_circle.radius)
      {
        target_circle.radius = it->radius;
        target_circle.center = it->center;
      }
    }
    circle(color_src, cv::Point(target_circle.center.x, target_circle.center.y), target_circle.radius, cv::Scalar(0,0,255), 2);
    printf("target: c: %f, %f, r: %f\n", target_circle.center.x, target_circle.center.y, target_circle.radius);

  }
#if defined(CAPTURE_VIDEO)
  static cv::VideoWriter video_writer("output.mp4", CV_FOURCC('M','J','P','G'), 20, cv::Size(width, height));
  video_writer.write(color_src);
#endif
  imshow("Target", color_src);
  cv::waitKey(1);
}