Example #1
/**
 * Callback for the rgb image topic. Appends each image message received
 * to the rgb video, using the rgb video writer.
 *
 * @param msg a ROS image message.
 */
void rgbCallback(const sensor_msgs::ImageConstPtr &msg)
{
    cv_bridge::CvImagePtr cv_ptr;

    // Copies the image data to cv_ptr and handles exceptions
    try
    {
        cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }
    catch (cv_bridge::Exception &e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    // Check for invalid image
    if (!(cv_ptr->image.rows) || !(cv_ptr->image.cols))
    {
        ROS_ERROR("cv_ptr error: invalid image frame received");
        exit(-1);
    }

    // Opens the rgb video writer if it's not opened yet.
    if (!is_open_rgb)
    {
        std::string rgb_name = ros::package::getPath("vision") + "/data/rgb_video.avi";
        rgb_writer.open(rgb_name.c_str(), CV_FOURCC('P','I','M','1'),
                        25, cv::Size(cv_ptr->image.cols, cv_ptr->image.rows),true);
        if (!rgb_writer.isOpened()) ROS_ERROR("Error! Could not open rgb video writer!");
        else is_open_rgb = true;
    }
    rgb_writer << cv_ptr->image; // Saves the rgb image on the rgb video.
}
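A minimal sketch of how this callback might be registered in a node. The node name, the image topic, and the two globals (rgb_writer, is_open_rgb) are assumptions for illustration, not part of the original example.
// Hedged sketch: assumes rgbCallback above is in scope; topic and node names
// are placeholders only.
#include <ros/ros.h>
#include <opencv2/opencv.hpp>

cv::VideoWriter rgb_writer;   // global writer used by rgbCallback (assumed)
bool is_open_rgb = false;     // global "writer already opened" flag (assumed)

int main(int argc, char **argv)
{
    ros::init(argc, argv, "rgb_recorder");
    ros::NodeHandle nh;
    ros::Subscriber sub = nh.subscribe("/camera/rgb/image_raw", 1, rgbCallback);
    ros::spin();              // rgbCallback appends each received frame to the video
    return 0;
}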
Example #2
            bool recordColor(const QString& file, int fourcc)
            {
                // Codecs previously hard-coded here: LAGS, X264, ZLIB, MJPG, HFYU
                // and CV_FOURCC_PROMPT; the codec is now passed in via the fourcc parameter.

                colorFile = (file + ".avi").toStdString();

                colorWriter.open(
                    colorFile,
                    fourcc,
                    30.0,
                    cv::Size(ColorFrame::WIDTH, ColorFrame::HEIGHT),
                    true
                );

                if (colorWriter.isOpened()) {
                    colorBuffer.create(cv::Size(ColorFrame::WIDTH, ColorFrame::HEIGHT), CV_8UC3);
                    colorFrame = new ColorFrame();
                    return true;
                }
                colorFile.clear();
                return false;
            }
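For reference, a self-contained, hedged sketch of opening a cv::VideoWriter directly with one of the codecs listed in the comment above; the file name, frame rate and frame size are placeholders.
#include <opencv2/opencv.hpp>

int main()
{
    // MJPG is a widely available fallback; HFYU (HuffYUV) is lossless but produces large files.
    cv::VideoWriter writer("test.avi", CV_FOURCC('M', 'J', 'P', 'G'),
                           30.0, cv::Size(640, 480), true);
    return writer.isOpened() ? 0 : 1;
}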
Example #3
	void operator()(const cv::Mat& image,bool record)
	{
		// no image no fun
		if (image.empty()) return;

		int cF = currentFrame ? 1 : 0;
		int pF = currentFrame ? 0 : 1;

		// resize the image
		cv::resize(image,frames[cF],cv::Size(image.cols*scale,image.rows*scale),0,0,cv::INTER_LINEAR);

		std::cout << "Current: " << cF << " Previous: " << pF << std::endl;
		std::cout << "Image " << image.size() << " c:" << image.channels() << " > " << frames[cF].size() << std::endl;

		if (record && !output.isOpened())
		{
			// fourcc
			int fourCC = FourCC<'X','V','I','D'>::value;
			// open the output at 25 fps, single channel (isColor=false)
			if (output.open("flow.avi",fourCC,25,frames[cF].size(),false))
			{
				std::cout << "capture file opened" << std::endl;
			}
		}

		// make a copy for the initial frame
		if (frames[pF].empty())
			frames[cF].copyTo(frames[pF]);

		if (!flow.empty())
			flow = cv::Mat::zeros(flow.size(),flow.type());

		// calculate dense optical flow
		cv::calcOpticalFlowFarneback(frames[pF],frames[cF],flow,.5,2,8,3,7,1.5,0);

		// draw into a copy: frames[cF] must stay untouched for the next flow step
		cv::Mat outImg = frames[cF].clone();

		drawOptFlowMap(flow,outImg,8,cv::Scalar::all(255));

		cv::imshow("Flow",outImg);

		// flip the buffers
		currentFrame = !currentFrame;
		
		// record the frame
		if (record && output.isOpened())
			output.write(outImg);
		
	}
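A minimal driver loop for the functor above, under stated assumptions: OpticalFlowOp stands in for the class that owns operator(), and the webcam frames are converted to grayscale first because cv::calcOpticalFlowFarneback expects single-channel input.
#include <opencv2/opencv.hpp>

int main()
{
	OpticalFlowOp flowOp;       // assumed name for the class defining operator() above
	cv::VideoCapture cap(0);
	cv::Mat frame, gray;
	while (cap.read(frame))
	{
		cv::cvtColor(frame, gray, CV_BGR2GRAY); // Farneback needs 1 channel
		flowOp(gray, true);                     // resize, flow, show, record
		if (cv::waitKey(1) == 27) break;        // ESC quits
	}
	return 0;
}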
Example #4
            bool recordDepth(const QString& file, int fourcc)
            {
                depthFile = (file + ".avi").toStdString();

                depthWriter.open(
                    depthFile,
                    fourcc,
                    30.0,
                    cv::Size(DepthFrame::WIDTH, DepthFrame::HEIGHT),
                    true
                );

                if (depthWriter.isOpened()) {
                    depthBuffer.create(cv::Size(DepthFrame::WIDTH, DepthFrame::HEIGHT), CV_8UC3);
                    depthFrame = new DepthFrame();
                    return true;
                }
                depthFile.clear();
                return false;
            }
Example #5
int main ( int argc,char** argv )
{
  ros::init ( argc,argv,"MultiLaneDetectionNode" );
  ros::NodeHandle nh_;
  cv::namedWindow(OPENCV_WINDOW);
  // if a 5th argument is given, open a timestamped output video
  if ( argc>5 ) {
    time_t rawTime;
    struct tm * timeInfo;
    char buffer[80];
    std::time ( &rawTime );
    timeInfo = std::localtime ( &rawTime );
    std::strftime ( buffer,80,"%F-%H-%M-%S",timeInfo );
    std::string time ( buffer );
    std::string suffix = "_"+time+".avi";
    vidRec.open ( argv[5]+suffix,CV_FOURCC ( 'M','P','4','V' ),20,cv::Size ( 640,480 ) );
  }

  // set up Caffe (GPU, test phase) and load the network, trained weights and mean image
  caffe::Caffe::SetDevice(0);
  caffe::Caffe::set_mode(caffe::Caffe::GPU);
  caffe::Caffe::set_phase(caffe::Caffe::TEST);
  caffe_net = new caffe::Net<float>(argv[1]);
  caffe_net->CopyTrainedLayersFrom(argv[2]);
  mean_image = cv::imread(argv[3]);

  // copy the mean image channel by channel into the mean blob
  std::vector<cv::Mat> splitChannels_mean;
  cv::split(mean_image,splitChannels_mean);
  float *mean_data = mean.mutable_cpu_data();
  int frameSize = mean_image.cols*mean_image.rows;
  for(int i = 0;i<3;i++){
    std::copy(splitChannels_mean[i].data, splitChannels_mean[i].data+frameSize, (float*)mean_data+frameSize*i);
  }

  // subscribe to the input image topic and advertise the detection delay
  ros::Subscriber sub = nh_.subscribe ( argv[4],1,caffeCallback );
  delay_pub = nh_.advertise<lane_detection::Delay> ( "multilane_detection/Delay",100 );
  ros::spin();
  cv::destroyWindow ( OPENCV_WINDOW );
  delete caffe_net;
  return 0;
}
Example #6
int main(int argc,char **argv)
{
    try
    {
        //parse arguments
        if (readArguments(argc,argv)==false) return 0;
        TheBoardConfig.readFromFile(TheBoardConfigFile);
        //read from camera or from  file
        if (TheInputVideo=="live") {
            TheVideoCapturer.open(0);
            waitTime=10;
        }
        else TheVideoCapturer.open(TheInputVideo);
        //check video is open
        if (!TheVideoCapturer.isOpened()) {
            cerr<<"Could not open video"<<endl;
            return -1;
        }

        //read first image to get the dimensions
        TheVideoCapturer>>TheInputImage;

        //Open outputvideo
        if ( TheOutVideoFilePath!="")
            VWriter.open(TheOutVideoFilePath,CV_FOURCC('M','J','P','G'),15,TheInputImage.size());

        //read camera parameters if passed
        if (TheIntrinsicFile!="") {
            TheCameraParameters.readFromXMLFile(TheIntrinsicFile);
            TheCameraParameters.resize(TheInputImage.size());
        }

        //Create gui

        cv::namedWindow("thres",1);
        cv::namedWindow("in",1);
        TheBoardDetector.setParams(TheBoardConfig,TheCameraParameters,TheMarkerSize);
        TheBoardDetector.getMarkerDetector().getThresholdParams( ThresParam1,ThresParam2);
        TheBoardDetector.getMarkerDetector().enableErosion(true); //for chessboards
        iThresParam1=ThresParam1;
        iThresParam2=ThresParam2;
        cv::createTrackbar("ThresParam1", "in",&iThresParam1, 13, cvTackBarEvents);
        cv::createTrackbar("ThresParam2", "in",&iThresParam2, 13, cvTackBarEvents);
        char key=0;
        int index=0;
        //capture until press ESC or until the end of the video
        while ( key!=27 && TheVideoCapturer.grab())
        {
            TheVideoCapturer.retrieve( TheInputImage);
            TheInputImage.copyTo(TheInputImageCopy);
            index++; //number of images captured
            double tick = (double)getTickCount();//for checking the speed
            //Detection of the board
            float probDetect=TheBoardDetector.detect(TheInputImage);
            //check the speed by calculating the mean speed of all iterations
            AvrgTime.first+=((double)getTickCount()-tick)/getTickFrequency();
            AvrgTime.second++;
            cout<<"Time detection="<<1000*AvrgTime.first/AvrgTime.second<<" milliseconds"<<endl;
            //print marker borders
            for (unsigned int i=0;i<TheBoardDetector.getDetectedMarkers().size();i++)
                TheBoardDetector.getDetectedMarkers()[i].draw(TheInputImageCopy,Scalar(0,0,255),1);

            //print board
             if (TheCameraParameters.isValid()) {
                if ( probDetect>0.2)   {
                    CvDrawingUtils::draw3dAxis( TheInputImageCopy,TheBoardDetector.getDetectedBoard(),TheCameraParameters);
                    //draw3dBoardCube( TheInputImageCopy,TheBoardDetected,TheIntriscCameraMatrix,TheDistorsionCameraParams);
                }
            }
            //DONE! Easy, right?

            //show input with augmented information and  the thresholded image
            cv::imshow("in",TheInputImageCopy);
            cv::imshow("thres",TheBoardDetector.getMarkerDetector().getThresholdedImage());
            //write to video if required
            if (  TheOutVideoFilePath!="") {
                //create a composed image showing the thresholded image in a corner
                //first create a small version of the thresholded image
                cv::Mat smallThres;
                cv::resize( TheBoardDetector.getMarkerDetector().getThresholdedImage(),smallThres,cvSize(TheInputImageCopy.cols/3,TheInputImageCopy.rows/3));
                cv::Mat small3C;
                cv::cvtColor(smallThres,small3C,CV_GRAY2BGR);
                cv::Mat roi=TheInputImageCopy(cv::Rect(0,0,TheInputImageCopy.cols/3,TheInputImageCopy.rows/3));
                small3C.copyTo(roi);
                VWriter<<TheInputImageCopy;

            }

            key=cv::waitKey(waitTime);//wait for key to be pressed
            processKey(key);
        }


    } catch (std::exception &ex) {
        cout<<"Exception :"<<ex.what()<<endl;
    }

}
Example #7
File: main.cpp Project: robotology/yarp
    void run() override
    {
        buf.lock();
        unsigned int sz=buf.size(); //!!! access to size must be protected: problem spotted with Linux stl
        buf.unlock();

        // every 10 seconds (or when closing) a writeToDisk is forced for whatever is buffered
        bool writeToDisk=false;
        double curTime=Time::now();
        if ((curTime-oldTime>10.0) || closing)
        {
            writeToDisk=sz>0;
            oldTime=curTime;
        }

        // it performs the writeToDisk on command or as soon as
        // the queue size is greater than the given threshold
        if ((sz>blockSize) || writeToDisk)
        {
        #ifdef ADD_VIDEO
            // extract images parameters just once
            if (doImgParamsExtraction && (sz>1))
            {
                buf.lock();
                DumpItem itemFront=buf.front();
                DumpItem itemEnd=buf.back();
                buf.unlock();

                int fps;
                int frameW=((IplImage*)itemEnd.obj->getPtr())->width;
                int frameH=((IplImage*)itemEnd.obj->getPtr())->height;

                t0=itemFront.timeStamp.getStamp();
                double dt=itemEnd.timeStamp.getStamp()-t0;
                if (dt<=0.0)
                    fps=25; // default
                else
                    fps=int(double(sz-1)/dt);

                videoWriter.open(videoFile.c_str(),CV_FOURCC('H','F','Y','U'),
                                 fps,cvSize(frameW,frameH),true);

                doImgParamsExtraction=false;
                doSaveFrame=true;
            }
        #endif

            // save to disk
            for (unsigned int i=0; i<sz; i++)
            {
                buf.lock();
                DumpItem item=buf.front();
                buf.pop_front();
                buf.unlock();

                fdata << item.seqNumber << ' ' << item.timeStamp.getString() << ' ';
                if (saveData)
                    fdata << item.obj->toFile(dirName,counter++) << endl;
                else
                {
                    ostringstream frame;
                    frame << "frame_" << setw(8) << setfill('0') << counter++;
                    fdata << frame.str() << endl;
                }

            #ifdef ADD_VIDEO
                if (doSaveFrame)
                {
                    cv::Mat img=cv::cvarrToMat((IplImage*)item.obj->getPtr());
                    videoWriter<<img;

                    // write the timecode of the frame
                    int dt=(int)(1000.0*(item.timeStamp.getStamp()-t0));
                    ftimecodes << dt << endl;
                }
            #endif

                delete item.obj;
            }

            cumulSize+=sz;
            yInfo() << sz << " items stored [cumul #: " << cumulSize << "]";
        }
    }