bool CvCapture_GStreamer::open( int type, const char* filename )
{
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    if(!isInited) {
//        printf("gst_init\n");
        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        isInited = true;
    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
        uri = realpath(filename, NULL);
        stream=false;
        if(uri) {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
        if(!uridecodebin) {
            CV_WARN("GStreamer: Failed to create uridecodebin\n");
            close();
            return false;
        }
        g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
    }

    if(manualpipeline) {
        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            gst_iterator_free(it);
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
        gst_iterator_free(it);

        pipeline = uridecodebin;
    } else {
        pipeline = gst_pipeline_new (NULL);

        color = gst_element_factory_make("ffmpegcolorspace", NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);

    {
        GstCaps* caps;
        caps = gst_caps_new_simple("video/x-raw-rgb",
                                   "red_mask",   G_TYPE_INT, 0x0000FF,
                                   "green_mask", G_TYPE_INT, 0x00FF00,
                                   "blue_mask",  G_TYPE_INT, 0xFF0000,
                                   NULL);
        gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
        gst_caps_unref(caps);
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
        gst_object_unref(pipeline);
        return false;
    }

    handleMessage();

    __END__;

    return true;
}
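For context: this open() is normally reached through the HighGUI C API rather than called directly. A minimal usage sketch, assuming an OpenCV build where the GStreamer backend handles the given filename (the file name is a placeholder):

#include <opencv/highgui.h>
#include <stdio.h>

int main(void)
{
    /* Both a plain path and a gst-launch style pipeline string end up in
       CvCapture_GStreamer::open() with type CV_CAP_GSTREAMER_FILE. */
    CvCapture *cap = cvCreateFileCapture("video.avi");
    if(!cap) {
        fprintf(stderr, "could not open capture\n");
        return 1;
    }
    IplImage *frame = cvQueryFrame(cap); /* decoded frame, owned by the capture */
    if(frame)
        printf("first frame: %dx%d\n", frame->width, frame->height);
    cvReleaseCapture(&cap);
    return 0;
}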
Example #2
static int icvSetProperty_GStreamer(CvCapture_GStreamer *cap, int id, double value)
{
	GstFormat format;
	GstSeekFlags flags;

	if(!cap->pipeline) {
		CV_WARN("GStreamer: no pipeline");
		return 0;
	}

	switch(id) {
	case CV_CAP_PROP_POS_MSEC:
		format = GST_FORMAT_TIME;
		flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
		if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
					    flags, (gint64) (value * GST_MSECOND))) {
			CV_WARN("GStreamer: unable to seek");
		}
		break;
	case CV_CAP_PROP_POS_FRAMES:
		format = GST_FORMAT_BUFFERS;
		flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
		if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
					    flags, (gint64) value)) {
			CV_WARN("GStreamer: unable to seek");
		}
		break;
	case CV_CAP_PROP_POS_AVI_RATIO:
		format = GST_FORMAT_PERCENT;
		flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
		if(!gst_element_seek_simple(GST_ELEMENT(cap->pipeline), format,
					    flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
			CV_WARN("GStreamer: unable to seek");
		}
		break;
	case CV_CAP_PROP_FRAME_WIDTH:
		if(value > 0)
			icvSetFilter(cap, "width", G_TYPE_INT, (int) value, 0);
		else
			icvRemoveFilter(cap, "width");
		break;
	case CV_CAP_PROP_FRAME_HEIGHT:
		if(value > 0)
			icvSetFilter(cap, "height", G_TYPE_INT, (int) value, 0);
		else
			icvRemoveFilter(cap, "height");
		break;
	case CV_CAP_PROP_FPS:
		if(value > 0) {
			int num, denom;
			num = (int) value;
			if(value != num) { // FIXME this supports only fractions x/1 and x/2
				num = (int) (value * 2);
				denom = 2;
			} else
				denom = 1;

			icvSetFilter(cap, "framerate", GST_TYPE_FRACTION, num, denom);
		} else
			icvRemoveFilter(cap, "framerate");
		break;
	case CV_CAP_PROP_FOURCC:
	case CV_CAP_PROP_FRAME_COUNT:
	case CV_CAP_PROP_FORMAT:
	case CV_CAP_PROP_MODE:
	case CV_CAP_PROP_BRIGHTNESS:
	case CV_CAP_PROP_CONTRAST:
	case CV_CAP_PROP_SATURATION:
	case CV_CAP_PROP_HUE:
	case CV_CAP_PROP_GAIN:
	case CV_CAP_PROP_CONVERT_RGB:
		break;
	default:
		CV_WARN("GStreamer: unhandled property");
	}
	return 0;
}
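From application code these cases are driven through cvSetCaptureProperty(); a short hedged sketch, where cap is assumed to be a CvCapture* backed by this GStreamer capture:

cvSetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC, 5000.0);    /* flushing, accurate seek to 5 s */
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320.0);  /* installs a "width" caps filter */
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240.0); /* installs a "height" caps filter */
cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 0.0);    /* value <= 0 removes the filter again */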
Example #3
static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
{
	CvCapture_GStreamer *capture = 0;
	CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

	__BEGIN__;

//	teststreamer(filename);

//	return 0;

	if(!isInited) {
		printf("gst_init\n");
		gst_init (NULL, NULL);

// according to the documentation this is the way to register a plugin now
// unfortunately, it has not propagated into my distribution yet...
// 		gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
// 			"opencv-appsink", "Element application sink",
// 			"0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
// 			"http://opencvlibrary.sourceforge.net/");

		isInited = true;
	}

	const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
//	printf("entered capturecreator %s\n", sourcetypes[type]);

	GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
	if(!source)
		return 0;

	if(type == CV_CAP_GSTREAMER_FILE)
		g_object_set(G_OBJECT(source), "location", filename, NULL);

	GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);

	GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
//	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
//	g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);

	GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);

	GstElement *pipeline = gst_pipeline_new (NULL);

	gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);

//	printf("added many\n");

	switch(type) {
	case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
		caps = gst_caps_new_simple("video/x-raw-rgb",
					   "width", G_TYPE_INT, 640,
					   "height", G_TYPE_INT, 480,
					   "framerate", GST_TYPE_FRACTION, 30, 1,
					   NULL);
		if(!gst_element_link_filtered(source, decodebin, caps)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		gst_caps_unref(caps);
		break;
	case CV_CAP_GSTREAMER_V4L:
	case CV_CAP_GSTREAMER_1394:
	case CV_CAP_GSTREAMER_FILE:
		if(!gst_element_link(source, decodebin)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		break;
	}

	if(!gst_element_link(colour, sink)) {
		CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("linked, pausing\n");

	if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
	   GST_STATE_CHANGE_FAILURE) {
		CV_WARN("GStreamer: unable to set pipeline to paused\n");
//		icvHandleMessage(capture);
//		cvReleaseCapture((CvCapture **)(void *)&capture);
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("state now paused\n");

	// construct capture struct
	capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
	memset(capture, 0, sizeof(CvCapture_GStreamer));
	capture->type = type;
	capture->pipeline = pipeline;
	capture->source = source;
	capture->decodebin = decodebin;
	capture->colour = colour;
	capture->appsink = sink;

	icvHandleMessage(capture);

	OPENCV_ASSERT(capture,
                      "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");

//	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
//	printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));

	__END__;

	return capture;
}
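Note that the type argument indexes sourcetypes[] directly, so the CV_CAP_GSTREAMER_* constants must stay aligned with that table. A sketch of the assumed correspondence:

/* Assumed mapping (must match the order of sourcetypes[] above):
     CV_CAP_GSTREAMER_1394 -> "dv1394src"
     CV_CAP_GSTREAMER_V4L  -> "v4lsrc"
     CV_CAP_GSTREAMER_V4L2 -> "v4l2src"
     CV_CAP_GSTREAMER_FILE -> "filesrc"  */
CvCapture_GStreamer *cap = icvCreateCapture_GStreamer(CV_CAP_GSTREAMER_FILE, "video.avi");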
Example #4
static int icvOpenAVI_FFMPEG( CvCaptureAVI_FFMPEG* capture, const char* filename )
{
    int err, valid = 0, video_index = -1, i;
    AVFormatContext *ic;

    capture->ic = NULL;
    capture->video_stream = -1;
    capture->video_st = NULL;
    /* register all codecs, demux and protocols */
    av_register_all();

    err = av_open_input_file(&ic, filename, NULL, 0, NULL);
    if (err < 0) {
	    CV_WARN("Error opening file");
	    goto exit_func;
    }
    capture->ic = ic;
    err = av_find_stream_info(ic);
    if (err < 0) {
	    CV_WARN("Could not find codec parameters");
	    goto exit_func;
    }
    for(i = 0; i < ic->nb_streams; i++) {
#if LIBAVFORMAT_BUILD > 4628
        AVCodecContext *enc = ic->streams[i]->codec;
#else
        AVCodecContext *enc = &ic->streams[i]->codec;
#endif
        AVCodec *codec;
        if( CODEC_TYPE_VIDEO == enc->codec_type && video_index < 0) {
            video_index = i;
            codec = avcodec_find_decoder(enc->codec_id);
            if (!codec || avcodec_open(enc, codec) < 0)
                goto exit_func;
            capture->video_stream = i;
            capture->video_st = ic->streams[i];
            capture->picture = avcodec_alloc_frame();

            capture->rgb_picture.data[0] = (uchar*)cvAlloc(
                                    avpicture_get_size( PIX_FMT_BGR24,
                                    enc->width, enc->height ));
            avpicture_fill( (AVPicture*)&capture->rgb_picture, capture->rgb_picture.data[0],
                            PIX_FMT_BGR24, enc->width, enc->height );

            cvInitImageHeader( &capture->frame, cvSize( enc->width, enc->height ), 8, 3, 0, 4 );
            cvSetData( &capture->frame, capture->rgb_picture.data[0],
                       capture->rgb_picture.linesize[0] );
            break;
        }
    }


    if(video_index >= 0)
        valid = 1;

exit_func:

    if( !valid )
        icvCloseAVI_FFMPEG( capture );

    return valid;
}
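The rgb_picture setup above pairs avpicture_get_size() with avpicture_fill() so one flat buffer backs both the AVPicture and the IplImage header. A standalone sketch of that idiom (old libavcodec API, as used here; w and h are placeholders):

int w = 640, h = 480;  /* placeholder frame size */
AVPicture pic;
int size = avpicture_get_size(PIX_FMT_BGR24, w, h);   /* bytes needed, including stride */
uint8_t *buf = (uint8_t*)av_malloc(size);
avpicture_fill(&pic, buf, PIX_FMT_BGR24, w, h);       /* fills pic.data[] and pic.linesize[] */
/* pic.data[0] / pic.linesize[0] can now back an IplImage via cvSetData(), as above */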
Example #5
static double icvGetProperty_GStreamer(CvCapture_GStreamer *cap, int id)
{
	GstFormat format;
	//GstQuery q;
	gint64 value;

	if(!cap->pipeline) {
		CV_WARN("GStreamer: no pipeline");
		return 0;
	}

	switch(id) {
	case CV_CAP_PROP_POS_MSEC:
		format = GST_FORMAT_TIME;
		if(!gst_element_query_position(cap->pipeline, &format, &value)) {
			CV_WARN("GStreamer: unable to query position of stream");
			return 0;
		}
		return value * 1e-6; // nanoseconds to milliseconds
	case CV_CAP_PROP_POS_FRAMES:
		format = GST_FORMAT_BUFFERS;
		if(!gst_element_query_position(cap->pipeline, &format, &value)) {
			CV_WARN("GStreamer: unable to query position of stream");
			return 0;
		}
		return value;
	case CV_CAP_PROP_POS_AVI_RATIO:
		format = GST_FORMAT_PERCENT;
		if(!gst_element_query_position(cap->pipeline, &format, &value)) {
			CV_WARN("GStreamer: unable to query position of stream");
			return 0;
		}
//		printf("value %llu %llu %g\n", value, GST_FORMAT_PERCENT_MAX, ((double) value) / GST_FORMAT_PERCENT_MAX);
		return ((double) value) / GST_FORMAT_PERCENT_MAX;
	case CV_CAP_PROP_FRAME_WIDTH:
	case CV_CAP_PROP_FRAME_HEIGHT:
	case CV_CAP_PROP_FPS:
	case CV_CAP_PROP_FOURCC:
		break;
	case CV_CAP_PROP_FRAME_COUNT:
		format = GST_FORMAT_BUFFERS;
		if(!gst_element_query_duration(cap->pipeline, &format, &value)) {
			CV_WARN("GStreamer: unable to query position of stream");
			return 0;
		}
		return value;
	case CV_CAP_PROP_FORMAT:
	case CV_CAP_PROP_MODE:
	case CV_CAP_PROP_BRIGHTNESS:
	case CV_CAP_PROP_CONTRAST:
	case CV_CAP_PROP_SATURATION:
	case CV_CAP_PROP_HUE:
	case CV_CAP_PROP_GAIN:
	case CV_CAP_PROP_CONVERT_RGB:
		break;
	default:
		CV_WARN("GStreamer: unhandled property");
		break;
	}
	return 0;
}
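From the application side these map onto cvGetCaptureProperty(); a hedged usage sketch, again assuming an open GStreamer-backed CvCapture* named cap:

double msec  = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_MSEC);
double fidx  = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_FRAMES);
double ratio = cvGetCaptureProperty(cap, CV_CAP_PROP_POS_AVI_RATIO); /* 0.0 .. 1.0 */
printf("at %.0f ms (frame %.0f, %.1f%%)\n", msec, fidx, ratio * 100.0);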
Example #6
/* add a video output stream */
static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc, int codec_tag, int w, int h, int bitrate, double fps, int pixel_format)
{
	AVCodecContext *c;
	AVStream *st;
	int codec_id;
	int frame_rate, frame_rate_base;
	AVCodec *codec;

	st = av_new_stream(oc, 0);
	if (!st) {
		CV_WARN("Could not allocate stream");
		return NULL;
	}

#if LIBAVFORMAT_BUILD > 4628
	c = st->codec;
#else
	c = &(st->codec);
#endif
#if LIBAVFORMAT_BUILD > 4621 
	codec_id = av_guess_codec(oc->oformat, NULL, oc->filename, NULL, CODEC_TYPE_VIDEO);
#else
	codec_id = oc->oformat->video_codec;
#endif

	if(codec_tag) c->codec_tag = codec_tag;

	c->codec_id = (CodecID) codec_id;
	codec = avcodec_find_encoder(c->codec_id);

	c->codec_type = CODEC_TYPE_VIDEO;

	/* put sample parameters */
	c->bit_rate = bitrate;

	/* resolution must be a multiple of two */
	c->width = w;
	c->height = h;

	/* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
	frame_rate=cvRound(fps);
	frame_rate_base=1;
	while (fabs((double)frame_rate / frame_rate_base - fps) > 0.001){
		frame_rate_base*=10;
		frame_rate=cvRound(fps*frame_rate_base);
	}
#if LIBAVFORMAT_BUILD > 4752
    c->time_base.den = frame_rate;
    c->time_base.num = frame_rate_base;
	/* adjust time base for supported framerates */
	if(codec && codec->supported_framerates){
		const AVRational *p= codec->supported_framerates;
		AVRational req= (AVRational){frame_rate, frame_rate_base};
		const AVRational *best=NULL;
		AVRational best_error= (AVRational){INT_MAX, 1};
		for(; p->den!=0; p++){
			AVRational error= av_sub_q(req, *p);
			if(error.num <0) error.num *= -1;
			if(av_cmp_q(error, best_error) < 0){
				best_error= error;
				best= p;
			}
		}
		if(best){
			c->time_base.den = best->num;
			c->time_base.num = best->den;
		}
	}
#else
	c->frame_rate = frame_rate;
	c->frame_rate_base = frame_rate_base;
#endif

	c->gop_size = 12; /* emit one intra frame every twelve frames at most */
	c->pix_fmt = (PixelFormat) pixel_format;
	if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* needed to avoid using macroblocks in which some coeffs overflow;
           this doesn't happen with normal video, it just happens here as the
           motion of the chroma plane doesn't match the luma plane */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}
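The fps-to-rational loop above multiplies the denominator by ten until the fraction reproduces fps; a worked trace for NTSC-style 29.97 fps (values computed by hand):

/* Worked trace of the framerate loop for fps = 29.97:
     frame_rate = 30,   frame_rate_base = 1    -> |30/1     - 29.97| = 0.03 > 0.001
     frame_rate = 300,  frame_rate_base = 10   -> |300/10   - 29.97| = 0.03 > 0.001
     frame_rate = 2997, frame_rate_base = 100  -> |2997/100 - 29.97| = 0    <= 0.001, done
   so c->time_base = (AVRational){100, 2997}, i.e. the inverse of the frame rate. */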
Example #7
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                        flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;

            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    return false;
}
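The CV_CAP_GSTREAMER_QUEUE_LENGTH case is specific to this backend; a hedged sketch of its use (the id is assumed to be exported alongside the other CV_CAP_* constants):

/* Let the appsink hold up to 10 buffers instead of the default 1,
   trading latency for fewer dropped frames on a live source. */
cvSetCaptureProperty(cap, CV_CAP_GSTREAMER_QUEUE_LENGTH, 10.0);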
Example #8
double CvCapture_GStreamer::getProperty( int propId )
{
    GstFormat format;
    //GstQuery q;
    gint64 value;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        if(!gst_element_query_position(sink, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value * 1e-6; // nanoseconds to milliseconds
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_position(sink, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        if(!gst_element_query_position(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH:
    case CV_CAP_PROP_FRAME_HEIGHT:
    case CV_CAP_PROP_FPS:
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FRAME_COUNT:
        format = GST_FORMAT_DEFAULT;
        if(!gst_element_query_duration(pipeline, &format, &value)) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink) {
            CV_WARN("GStreamer: there is no sink yet");
            return false;
        }
        return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }
    return false;
}
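Because failures surface as 0 (false) here, a caller cannot distinguish "position zero" from "query failed"; a defensive usage sketch:

/* Treat a non-positive frame count as "unknown" rather than "empty stream". */
double n = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT);
if(n <= 0)
    fprintf(stderr, "frame count unavailable (live source or query failed)\n");
else
    printf("%.0f frames\n", n);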
Example #9
/*!
 * \brief CvCapture_GStreamer::getProperty retrieve the requested property from the pipeline
 * \param propId requested property
 * \return property value
 *
 * There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
 * For frame-based properties, we use the caps of the last received sample. This means that some properties
 * are not available until a first frame has been received.
 */
double CvCapture_GStreamer::getProperty( int propId )
{
    GstFormat format;
    gint64 value;
    gboolean status;

#if GST_VERSION_MAJOR == 0
#define FORMAT &format
#else
#define FORMAT format
#endif

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value * 1e-6; // nano seconds to milli seconds
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH: {
        if (!buffer_caps){
            CV_WARN("GStreamer: unable to query width of frame; no frame grabbed yet");
            return 0;
        }
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
        gint width = 0;
        if(!gst_structure_get_int(structure, "width", &width)){
            CV_WARN("GStreamer: unable to query width of frame");
            return 0;
        }
        return width;
    }
    case CV_CAP_PROP_FRAME_HEIGHT: {
        if (!buffer_caps){
            CV_WARN("GStreamer: unable to query height of frame; no frame grabbed yet");
            return 0;
        }
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
        gint height = 0;
        if(!gst_structure_get_int(structure, "height", &height)){
            CV_WARN("GStreamer: unable to query height of frame");
            return 0;
        }
        return height;
    }
    case CV_CAP_PROP_FPS: {
        if (!buffer_caps){
            CV_WARN("GStreamer: unable to query framerate of stream; no frame grabbed yet");
            return 0;
        }
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
        gint num = 0, denom=1;
        if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)){
            CV_WARN("GStreamer: unable to query framerate of stream");
            return 0;
        }
        return (double)num/(double)denom;
    }
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FRAME_COUNT:
        format = GST_FORMAT_DEFAULT;
        status = gst_element_query_duration(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query duration of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink) {
            CV_WARN("GStreamer: there is no sink yet");
            return false;
        }
        return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }

#undef FORMAT

    return false;
}
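The FORMAT macro above papers over the signature change of the position/duration queries between GStreamer 0.10 and 1.x; a standalone sketch of the same idiom (element is a placeholder for any GstElement*):

gint64 pos = 0;
gboolean ok;
#if GST_VERSION_MAJOR == 0
GstFormat fmt = GST_FORMAT_TIME;   /* 0.10 passes the format by pointer */
ok = gst_element_query_position(element, &fmt, &pos);
#else
ok = gst_element_query_position(element, GST_FORMAT_TIME, &pos);
#endif
if(ok)
    g_print("position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS(pos));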
Example #10
/*!
 * \brief CvCapture_GStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
 * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - a URI:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the URI or manual pipeline description, which assume
 *  a live source)
 *
 *  The pipeline will only be started when the first frame is grabbed. Setting pipeline properties
 *  is really slow if we need to restart the pipeline over and over again.
 *
 *  TODO: the 'type' parameter is IMO unneeded. For v4l2, filename 'v4l2:///dev/video0' can be used.
 *  I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    gst_initializer::init();

    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    GstElementFactory * testfac;

    if (type == CV_CAP_GSTREAMER_V4L){
        testfac = gst_element_factory_find("v4lsrc");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4lsrc ! "COLOR_ELEM" ! appsink";
    }
    if (type == CV_CAP_GSTREAMER_V4L2){
        testfac = gst_element_factory_find("v4l2src");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4l2src ! "COLOR_ELEM" ! appsink";
    }


    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                g_clear_error(&err);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }

    if(manualpipeline)
    {
        GstIterator *it = NULL;
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));

        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                  sink = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!sink){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#endif
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);
        // videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
        // conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    //TODO: is a single buffer really enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else
    // support 1 and 3 channel 8 bit data, as well as bayer (also  1 channel, 8bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    // we do not start the pipeline just yet:
    // the user probably wants to set capture properties first, so start it when the first frame is requested
    __END__;

    return true;
}
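Tying back to the doc comment above, all three accepted 'filename' forms funnel into this open(); a hedged usage sketch via the C++ wrapper (paths are placeholders):

cv::VideoCapture plain("/path/to/video.avi");                      // ordinary filesystem path
cv::VideoCapture byUri("file:///path/to/video.avi");               // valid URI, handed to uridecodebin
cv::VideoCapture manual("videotestsrc ! videoconvert ! appsink");  // manual pipeline; sink must be appsink0 or opencvsink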