void ApplicationSink::setMaxBuffers(uint maxbuffers)
{
    d->lazyConstruct(this);
    if (d->appSink()) {
        gst_app_sink_set_max_buffers(d->appSink(), maxbuffers);
    }
}
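A note on what this wrapper controls: "max-buffers" bounds the appsink's internal queue; when it is full, the streaming thread blocks (or drops old buffers, if "drop" is set) until the application pulls a sample. A minimal GStreamer 1.x pulling loop might look like this (sketch; the function name is illustrative):

// Sketch of the pulling side: with max-buffers=1 the queue never holds more
// than the most recent sample; pull_sample() blocks until one is available.
static void consume_latest_sample(GstAppSink *appsink)
{
    GstSample *sample = gst_app_sink_pull_sample(appsink); // NULL on EOS
    if (sample) {
        GstBuffer *buffer = gst_sample_get_buffer(sample);
        GstMapInfo map;
        if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            // ... process map.data / map.size ...
            gst_buffer_unmap(buffer, &map);
        }
        gst_sample_unref(sample);
    }
}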
static void
gst_app_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSink *appsink = GST_APP_SINK_CAST (object);

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_sink_set_caps (appsink, gst_value_get_caps (value));
      break;
    case PROP_EMIT_SIGNALS:
      gst_app_sink_set_emit_signals (appsink, g_value_get_boolean (value));
      break;
    case PROP_MAX_BUFFERS:
      gst_app_sink_set_max_buffers (appsink, g_value_get_uint (value));
      break;
    case PROP_DROP:
      gst_app_sink_set_drop (appsink, g_value_get_boolean (value));
      break;
    case PROP_WAIT_ON_EOS:
      gst_app_sink_set_wait_on_eos (appsink, g_value_get_boolean (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
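Since these are ordinary GObject properties, the same setters are also reachable generically; a small sketch of the property route (the values are arbitrary):

// Dispatches through gst_app_sink_set_property() above
// (PROP_MAX_BUFFERS, PROP_DROP, PROP_EMIT_SIGNALS).
static void configure_appsink(GstAppSink *appsink)
{
    g_object_set(G_OBJECT(appsink),
                 "max-buffers", 4u,
                 "drop", TRUE,
                 "emit-signals", FALSE,
                 NULL);
}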
CAMLprim value ocaml_gstreamer_appsink_set_max_buffers(value _as, value _n)
{
  CAMLparam2(_as, _n);
  appsink *as = Appsink_val(_as);
  int n = Int_val(_n);

  caml_release_runtime_system();
  gst_app_sink_set_max_buffers(as->appsink, n);
  caml_acquire_runtime_system();

  CAMLreturn(Val_unit);
}
void GstAppSinkPipeline::Initialize(std::string pipelineString)
{	
	GstPipelineWrapper::InitializePipelineWithString(pipelineString);
	
	// setup appsink
	appsink = GstPipelineWrapper::GetElementByName(APPSINK_NAME);
	GstAppSinkCallbacks appsinkCallbacks = {}; // zero-initialize so unset/reserved members are NULL
	appsinkCallbacks.new_preroll	= &GstAppSinkPipeline::NewPrerollCallback;
	appsinkCallbacks.new_sample		= &GstAppSinkPipeline::NewSampleCallback;
	appsinkCallbacks.eos			= &GstAppSinkPipeline::EndOfStreamCallback; 
	
//	std::cout << pipelineString << std::endl;
	
	gst_app_sink_set_drop			(GST_APP_SINK(appsink), true);
	gst_app_sink_set_max_buffers	(GST_APP_SINK(appsink), 1);
	//gst_app_sink_set_emit_signals	(GST_APP_SINK(appsink), true);
	gst_app_sink_set_callbacks		(GST_APP_SINK(appsink), &appsinkCallbacks, this, (GDestroyNotify)GstAppSinkPipeline::DestroyCallback);		
}
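The members of GstAppSinkCallbacks must be free functions, so the class presumably recovers its instance from the user_data pointer registered above. A hedged sketch of one such trampoline (the signature comes from GstAppSinkCallbacks; ReceiveNewSample is an assumed instance method):

GstFlowReturn GstAppSinkPipeline::NewSampleCallback(GstAppSink* appsink, gpointer user_data)
{
	// 'this' was passed as user_data to gst_app_sink_set_callbacks() above.
	(void)appsink;
	return static_cast<GstAppSinkPipeline*>(user_data)->ReceiveNewSample();
}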
/*!
 * \brief OpenIMAJCapGStreamer::setProperty
 * \param propId
 * \param value
 * \return success
 * Sets the property identified by propId to the given value. If the pipeline is running,
 * it is briefly stopped and restarted after the property has been set.
 */
bool OpenIMAJCapGStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;
    
    if(!pipeline) {
        WARN("GStreamer: no pipeline");
        return false;
    }
    
    bool wasPlaying = this->isPipelinePlaying();
    if (wasPlaying)
        this->stopPipeline();
    
    
    switch(propId) {
        case CAP_PROP_POS_MSEC:
            format = GST_FORMAT_TIME;
            flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
            if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                        flags, (gint64) (value * GST_MSECOND))) {
                WARN("GStreamer: unable to seek");
            }
            break;
        case CAP_PROP_POS_FRAMES:
            format = GST_FORMAT_DEFAULT;
            flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
            if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                        flags, (gint64) value)) {
                WARN("GStreamer: unable to seek");
            }
            break;
        case CAP_PROP_POS_AVI_RATIO:
            format = GST_FORMAT_PERCENT;
            flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
            if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                        flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
                WARN("GStreamer: unable to seek");
            }
            break;
        case CAP_PROP_FRAME_WIDTH:
            if(value > 0)
                setFilter("width", G_TYPE_INT, (int) value, 0);
            else
                removeFilter("width");
            break;
        case CAP_PROP_FRAME_HEIGHT:
            if(value > 0)
                setFilter("height", G_TYPE_INT, (int) value, 0);
            else
                removeFilter("height");
            break;
        case CAP_PROP_FPS:
            if(value > 0) {
                double num=0, denom = 1;
                toFraction(value, num, denom);
                setFilter("framerate", GST_TYPE_FRACTION, (int)num, (int)denom);
            } else
                removeFilter("framerate");
            break;
        case CAP_GSTREAMER_QUEUE_LENGTH:
            if(!sink)
                break;
            gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
            break;
        default:
            WARN("GStreamer: unhandled property");
    }
    
    if (wasPlaying)
        this->startPipeline();
    
    return false;
}
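toFraction() is a helper of this class that is not shown here; a minimal stand-in illustrating what it plausibly does (nearest rational with a bounded denominator; purely an assumption about the real helper):

#include <cmath>

// Hypothetical stand-in for toFraction(): find num/denom close to 'value'
// so the rate can be passed as a GST_TYPE_FRACTION (e.g. 29.97 -> 2997/100).
static void toFraction(double value, double& num, double& denom)
{
    int best_d = 1;
    double best_err = 1e12;
    for (int d = 1; d <= 1000; ++d) {
        double n = value * d;
        double err = std::fabs(n - std::floor(n + 0.5));
        if (err < best_err) { best_err = err; best_d = d; }
        if (best_err == 0.0) break;
    }
    denom = best_d;
    num = std::floor(value * best_d + 0.5);
}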
/*!
 * \brief OpenIMAJCapGStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CAP_GSTREAMER_*
 * \param filename Filename to open in case of CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - an uri:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, OpenIMAJCapGStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the uri or manual pipeline description, which assume
 *  a live source)
 *
 *  The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties
 *  is really slow if we need to restart the pipeline over and over again.
 *
 */
bool OpenIMAJCapGStreamer::open(const char* filename )
{
    if(!isInited) {
        //FIXME: threadsafety
        gst_init (NULL, NULL);
        isInited = true;
    }

    
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    
    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                //close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }
    
    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);

            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        }
        g_free(protocol);
        
        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }
    
    if(manualpipeline)
    {
        GstIterator *it = NULL;
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));
        
        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;
        
        while (!done) {
            switch (gst_iterator_next (it, &value)) {
                case GST_ITERATOR_OK:
                    element = GST_ELEMENT (g_value_get_object (&value));
                    name = gst_element_get_name(element);
                    if (name){
                        if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                            sink = GST_ELEMENT ( gst_object_ref (element) );
                            done = TRUE;
                        }
                        g_free(name);
                    }
                    g_value_unset (&value);
                    
                    break;
                case GST_ITERATOR_RESYNC:
                    gst_iterator_resync (it);
                    break;
                case GST_ITERATOR_ERROR:
                case GST_ITERATOR_DONE:
                    done = TRUE;
                    break;
            }
        }
        gst_iterator_free (it);
        
        
        if (!sink){
            //ERROR(1, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
        
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);
        // videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
        // conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);
        
        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        
        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                //ERROR(1, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }
        
        if(!gst_element_link(color, sink)) {
            //ERROR(1, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }
    
    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
    
    // support 1 and 3 channel 8 bit data, as well as bayer (also  1 channel, 8bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    
    //we do not start recording here just yet.
    // the user probably wants to set capture properties first, so start recording whenever the first frame is requested
    
    return true;
}
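Since the pipeline is only started when the first frame is grabbed, the grab path would look roughly like this (sketch: isPipelinePlaying()/startPipeline() appear elsewhere in this class, but the body below is an assumption):

bool OpenIMAJCapGStreamer::grabFrame()
{
    if (!pipeline)
        return false;
    // Lazy start: open() only configured the pipeline, it did not start it.
    if (!this->isPipelinePlaying())
        this->startPipeline();
    // Blocking pull; the appsink was configured with emit-signals=0 above.
    GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if (!sample)
        return false; // EOS or error
    // ... stash the sample for a later retrieveFrame() ...
    gst_sample_unref(sample);
    return true;
}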
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	if( name.find( "file://",0 ) != string::npos){
		bIsStream		= false;
	}else if( name.find( "://",0 ) == string::npos){
		name 			= "file://"+ofToDataPath(name,true);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	ofGstUtils::startGstMainLoop();

	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb, synced to the clock
	GstElement * gstSink = gst_element_factory_make("appsink", "app_sink");

	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
	gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink), 8);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
	gst_base_sink_set_max_lateness  (GST_BASE_SINK(gstSink), -1);

	int bpp;
	string mime;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		mime = "video/x-raw-gray";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		mime = "video/x-raw-rgb";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
	case OF_PIXELS_BGRA:
		mime = "video/x-raw-rgb";
		bpp = 32;
		break;
	default:
		mime = "video/x-raw-rgb";
		bpp=24;
		break;
	}

	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
										"bpp", G_TYPE_INT, bpp,
										"depth", G_TYPE_INT, 24,
										"endianness",G_TYPE_INT,4321,
										"red_mask",G_TYPE_INT,0xff0000,
										"green_mask",G_TYPE_INT,0x00ff00,
										"blue_mask",G_TYPE_INT,0x0000ff,
										"alpha_mask",G_TYPE_INT,0x000000ff,
										NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	if(threadAppSink){
		GstElement * appQueue = gst_element_factory_make("queue","appsink_queue");
		g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL);
		GstElement* appBin = gst_bin_new("app_bin");
		gst_bin_add(GST_BIN(appBin), appQueue);
		GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink");
		GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad);
		gst_object_unref(appQueuePad);
		gst_element_add_pad(appBin, ghostPad);

		gst_bin_add_many(GST_BIN(appBin), gstSink, NULL);
		gst_element_link_many(appQueue, gstSink, NULL);

		g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL);
	}else{
		g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);
	}

#ifdef TARGET_WIN32
	GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

#endif


	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	if(!bIsStream) return allocate(bpp);
	else return true;
}
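The bpp/mask caps above are GStreamer 0.10 style ("video/x-raw-rgb"); under GStreamer 1.x the same request is expressed through the format field. A sketch of the 1.x equivalent for the 24-bit RGB case:

// Sketch: GStreamer 1.x counterpart of the 0.10 bpp/mask caps built above.
static void setRgbCaps1x(GstAppSink* appsink)
{
	GstCaps* caps = gst_caps_new_simple("video/x-raw",
										"format", G_TYPE_STRING, "RGB",
										NULL);
	gst_app_sink_set_caps(appsink, caps);
	gst_caps_unref(caps);
}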
bool ofGstUtils::startPipeline(){

	bPaused 			= true;
	speed 				= 1.0f;


	GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));

	if(bus){
		busWatchID = gst_bus_add_watch (bus, (GstBusFunc) busFunction, this);
		gst_object_unref(bus);
	}

	if(isAppSink){
		ofLogVerbose("ofGstUtils") << "startPipeline(): attaching callbacks";
		// set the appsink to not emit signals, we are using callbacks instead
		// and frameByFrame to get buffers by polling instead of callback
		g_object_set (G_OBJECT (gstSink), "emit-signals", FALSE, "sync", !bFrameByFrame, (void*)NULL);
		// gst_app_sink_set_drop(GST_APP_SINK(gstSink),1);
		// gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink),2);

		if(!bFrameByFrame){
			GstAppSinkCallbacks gstCallbacks = {}; // zero-init so unset/reserved members are NULL
			gstCallbacks.eos = &on_eos_from_source;
			gstCallbacks.new_preroll = &on_new_preroll_from_source;
#if GST_VERSION_MAJOR==0
			gstCallbacks.new_buffer = &on_new_buffer_from_source;
#else
			gstCallbacks.new_sample = &on_new_buffer_from_source;
#endif

			gst_app_sink_set_callbacks(GST_APP_SINK(gstSink), &gstCallbacks, this, NULL);
		}
	}

	// pause the pipeline
	//GstState targetState;
	GstState state;
	auto ret = gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PAUSED);
	switch (ret) {
		case GST_STATE_CHANGE_FAILURE:
			ofLogError("ofGstUtils") << "startPipeline(): unable to pause pipeline";
			return false;
			break;
		case GST_STATE_CHANGE_NO_PREROLL:
			ofLogVerbose() << "Pipeline is live and does not need PREROLL waiting PLAY";
			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PLAYING);
			if(isAppSink){
				gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink),1);
				gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
			}
			break;
		case GST_STATE_CHANGE_ASYNC:
			ofLogVerbose() << "Pipeline is PREROLLING";
			//targetState = GST_STATE_PAUSED;
			if(!isStream && gst_element_get_state(gstPipeline,&state,NULL,5*GST_SECOND)!=GST_STATE_CHANGE_SUCCESS){
				ofLogError("ofGstUtils") << "startPipeline(): unable to pause pipeline after 5s";
				return false;
			}else{
				ofLogVerbose() << "Pipeline is PREROLLED";
			}
			break;
		case GST_STATE_CHANGE_SUCCESS:
			ofLogVerbose() << "Pipeline is PREROLLED";
			break;
	}

	// wait for paused state to query the duration
	if(!isStream){
		bPlaying = true;
		bLoaded = true;
	}

	return true;
}
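busFunction is registered above but not shown; a hedged sketch of a GstBusFunc with the expected shape (the real ofGstUtils handler does considerably more):

static gboolean busFunction(GstBus * bus, GstMessage * message, gpointer data){
	ofGstUtils* self = static_cast<ofGstUtils*>(data);
	(void)bus; (void)self; // instance available for state updates
	switch (GST_MESSAGE_TYPE(message)) {
		case GST_MESSAGE_ERROR: {
			GError* err = NULL; gchar* dbg = NULL;
			gst_message_parse_error(message, &err, &dbg);
			ofLogError("ofGstUtils") << "bus error: " << err->message;
			g_error_free(err); g_free(dbg);
			break;
		}
		case GST_MESSAGE_EOS:
			// ... flag end-of-stream ...
			break;
		default:
			break;
	}
	return TRUE; // keep the watch installed
}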
/*
 *      PsychGSCreateMovie() -- Create a movie object.
 *
 *      This function tries to open a moviefile (with or without audio/video tracks)
 *      and create an associated movie object for it.
 *
 *      win = Pointer to window record of associated onscreen window.
 *      moviename = char* with the name of the moviefile.
 *      preloadSecs = How many seconds of the movie should be preloaded/prefetched into RAM at movie open time?
 *      moviehandle = handle to the new movie.
 */
void PsychGSCreateMovie(PsychWindowRecordType *win, const char* moviename, double preloadSecs, int* moviehandle)
{
    GstCaps                     *colorcaps;
    GstElement			*theMovie = NULL;
    GMainLoop			*MovieContext = NULL;
    GstBus			*bus = NULL;
    GstFormat			fmt;
    GstElement      *videosink;
    gint64			length_format;
    GstPad			*pad, *peerpad;
    const GstCaps		*caps;
    GstStructure		*str;
    gint			width,height;
    gint			rate1, rate2;
    int				i, slotid;
    GError			*error = NULL;
    char			movieLocation[FILENAME_MAX];
    psych_bool			trueValue = TRUE;
    char			msgerr[10000];
    char			errdesc[1000];
    psych_bool			printErrors;

    // Suppress output of error-messages if moviehandle == -1000. That means we
    // run in our own Posix-Thread, not in the Matlab-Thread. Printing via Matlab's
    // printing facilities would likely cause a terrible crash.
    printErrors = (*moviehandle == -1000) ? FALSE : TRUE;
    
    // Set movie handle to "failed" initially:
    *moviehandle = -1;

    // We start GStreamer only on first invocation.
    if (firsttime) {        
        // Initialize GStreamer: The routine is defined in PsychVideoCaptureSupportGStreamer.c
		PsychGSCheckInit("movie playback");
        firsttime = FALSE;
    }

    if (win && !PsychIsOnscreenWindow(win)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Provided windowPtr is not an onscreen window."); else return;
    }

    if (NULL == moviename) {
        if (printErrors) PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of moviename passed!"); else return;
    }

    if (numMovieRecords >= PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Search first free slot in movieRecordBANK:
    for (i=0; (i < PSYCH_MAX_MOVIES) && (movieRecordBANK[i].theMovie); i++);
    if (i>=PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Slot slotid will contain the movie record for our new movie object:
    slotid=i;

    // Zero-out new record in moviebank:
    memset(&movieRecordBANK[slotid], 0, sizeof(PsychMovieRecordType));
    
    // Create name-string for moviename: If an URI qualifier is at the beginning,
    // we're fine and just pass the URI as-is. Otherwise we add the file:// URI prefix.
    if (strstr(moviename, "://") || ((strstr(moviename, "v4l") == moviename) && strstr(moviename, "//"))) {
	snprintf(movieLocation, sizeof(movieLocation)-1, "%s", moviename);
    } else {
	snprintf(movieLocation, sizeof(movieLocation)-1, "file:///%s", moviename);
    }
    strncpy(movieRecordBANK[slotid].movieLocation, movieLocation, FILENAME_MAX);
    strncpy(movieRecordBANK[slotid].movieName, moviename, FILENAME_MAX);

    // Create movie playback pipeline:
    theMovie = gst_element_factory_make ("playbin2", "ptbmovieplaybackpipeline");

    // Assign name of movie to play:
    g_object_set(G_OBJECT(theMovie), "uri", movieLocation, NULL);

    // Connect callback to about-to-finish signal: Signal is emitted as soon as
    // end of current playback iteration is approaching. The callback checks if
    // looped playback is requested. If so, it schedules a new playback iteration.
    g_signal_connect(G_OBJECT(theMovie), "about-to-finish", G_CALLBACK(PsychMovieAboutToFinishCB), &(movieRecordBANK[slotid]));

    // Assign message context, message bus and message callback for
    // the pipeline to report events and state changes, errors etc.:    
    MovieContext = g_main_loop_new (NULL, FALSE);
    movieRecordBANK[slotid].MovieContext = MovieContext;
    bus = gst_pipeline_get_bus(GST_PIPELINE(theMovie));
    // Didn't work: g_signal_connect (G_OBJECT(bus), "message::error", G_CALLBACK(PsychMessageErrorCB), NULL);
    //              g_signal_connect (G_OBJECT(bus), "message::warning", G_CALLBACK(PsychMessageErrorCB), NULL);
    gst_bus_add_watch(bus, PsychMovieBusCallback, &(movieRecordBANK[slotid]));
    gst_object_unref(bus);

    // Assign a fakesink named "ptbsink0" as destination video-sink for
    // all video content. This allows us to get hold of the video frame buffers for
    // converting them into PTB OpenGL textures:
    videosink = gst_element_factory_make ("appsink", "ptbsink0");
    if (!videosink) {
	printf("PTB-ERROR: Failed to create video-sink appsink ptbsink!\n");
	PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
	PsychErrorExitMsg(PsychError_system, "Opening the movie failed. Reason hopefully given above.");
    }

    movieRecordBANK[slotid].videosink = videosink;

    // Our OpenGL texture creation routine needs BGRA8 data in GL_UNSIGNED_8_8_8_8_REV
    // format, but the pipeline usually delivers YUV data in planar format. Therefore
    // need to perform colorspace/colorformat conversion. We build a little videobin
    // which consists of a ffmpegcolorspace converter plugin connected to our appsink
    // plugin which will deliver video data to us for conversion into textures.
    // The "sink" pad of the converter plugin is connected as the "sink" pad of our
    // videobin, and the videobin is connected to the video-sink output of the pipeline,
    // thereby receiving decoded video data. We place a videocaps filter inbetween the
    // converter and the appsink to enforce a color format conversion to the "colorcaps"
    // we need. colorcaps define the needed data format for efficient conversion into
    // a RGBA8 texture:
    colorcaps = gst_caps_new_simple (   "video/x-raw-rgb",
					"bpp", G_TYPE_INT, 32,
					"depth", G_TYPE_INT, 32,
					"alpha_mask", G_TYPE_INT, 0x000000FF,
					"red_mask", G_TYPE_INT,   0x0000FF00,
					"green_mask", G_TYPE_INT, 0x00FF0000,
					"blue_mask", G_TYPE_INT,  0xFF000000,
					NULL);

    /*
    // Old style method: Only left here for documentation to show how one can create
    // video sub-pipelines via bin's and connect them to each other via ghostpads: 

    GstElement *videobin = gst_bin_new ("video_output_bin");
    GstElement *videocon = gst_element_factory_make ("ffmpegcolorspace", "color_converter");
    gst_bin_add_many(GST_BIN(videobin), videocon, videosink, NULL);

    GstPad *ghostpad = gst_ghost_pad_new("Video_Ghostsink", gst_element_get_pad(videocon, "sink"));
    gst_element_add_pad(videobin, ghostpad);

    gst_element_link_filtered(videocon, videosink, colorcaps);

    // Assign our special videobin as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videobin, NULL);
    */

    // New style method: Leaves the freedom of choice of color converter (if any)
    // to the auto-plugger.

    // Assign 'colorcaps' as caps to our videosink. This marks the videosink so
    // that it can only receive video image data in the format defined by colorcaps,
    // i.e., a format that is easy to consume for OpenGL's texture creation on std.
    // gpu's. It is the job of the video pipeline's autoplugger to plug in proper
    // color & format conversion plugins to satisfy videosink's needs.
    gst_app_sink_set_caps(GST_APP_SINK(videosink), colorcaps);

    // Assign our special appsink 'videosink' as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videosink, NULL);
    gst_caps_unref(colorcaps);

    // Get the pad from the final sink for probing width x height of movie frames and nominal framerate of movie:	
    pad = gst_element_get_pad(videosink, "sink");

    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    // Should we preroll / preload?	
    if ((preloadSecs > 0) || (preloadSecs == -1)) {
	// Preload / Preroll the pipeline:
	if (!PsychMoviePipelineSetState(theMovie, GST_STATE_PAUSED, 30.0)) {
		PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
		PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
	}
    } else {
	// Ready the pipeline:
	if (!PsychMoviePipelineSetState(theMovie, GST_STATE_READY, 30.0)) {
		PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
		PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
	}    
    }

    // Query number of available video and audio tracks in movie:
    g_object_get (G_OBJECT(theMovie),
               "n-video", &movieRecordBANK[slotid].nrVideoTracks,
               "n-audio", &movieRecordBANK[slotid].nrAudioTracks,
                NULL);

    // We need a valid onscreen window handle for real video playback:
    if ((NULL == win) && (movieRecordBANK[slotid].nrVideoTracks > 0)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "No windowPtr to an onscreen window provided. Must do so for movies with videotrack!"); else return;
    }
 
    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    PsychInitMutex(&movieRecordBANK[slotid].mutex);
    PsychInitCondition(&movieRecordBANK[slotid].condition, NULL);

    if (oldstyle) {
	// Install the probe callback for reception of video frames from engine at the sink-pad itself:
	gst_pad_add_buffer_probe(pad, G_CALLBACK(PsychHaveVideoDataCallback), &(movieRecordBANK[slotid]));
    } else {
	// Install callbacks used by the videosink (appsink) to announce various events:
	gst_app_sink_set_callbacks(GST_APP_SINK(videosink), &videosinkCallbacks, &(movieRecordBANK[slotid]), PsychDestroyNotifyCallback);
    }

    // Drop frames if callback can't pull buffers fast enough:
    // This together with the max queue lengths of 1 allows to
    // maintain audio-video sync by framedropping if needed.
    gst_app_sink_set_drop(GST_APP_SINK(videosink), TRUE);

    // Only allow one queued buffer before dropping:
    gst_app_sink_set_max_buffers(GST_APP_SINK(videosink), 1);

    // Assign harmless initial settings for fps and frame size:
    rate1 = 0;
    rate2 = 1;
    width = height = 0;

    // Videotrack available?
    if (movieRecordBANK[slotid].nrVideoTracks > 0) {
	// Yes: Query size and framerate of movie:
	peerpad = gst_pad_get_peer(pad);
	caps=gst_pad_get_negotiated_caps(peerpad);
	if (caps) {
		str=gst_caps_get_structure(caps,0);

		/* Get some data about the frame */
		rate1 = 1; rate2 = 1;
		gst_structure_get_fraction(str, "pixel-aspect-ratio", &rate1, &rate2);
		movieRecordBANK[slotid].aspectRatio = (double) rate1 / (double) rate2;
		gst_structure_get_int(str,"width",&width);
		gst_structure_get_int(str,"height",&height);
		rate1 = 0; rate2 = 1;
		gst_structure_get_fraction(str, "framerate", &rate1, &rate2);

	 } else {
		printf("PTB-DEBUG: No frame info available after preroll.\n");	
	 }
    }

    if (strstr(moviename, "v4l2:")) {
	// Special case: The "movie" is actually a video4linux2 live source.
	// Need to make up parameters for now, so it works as a "movie":
	rate1 = 30; width = 640; height = 480;
	movieRecordBANK[slotid].nrVideoTracks = 1;

	// Ugliness at its best ;-)
	if (strstr(moviename, "320")) { width = 320; height = 240; };
    }

    // Release the pad:
    gst_object_unref(pad);

    // Assign new record in moviebank:
    movieRecordBANK[slotid].theMovie = theMovie;
    movieRecordBANK[slotid].loopflag = 0;
    movieRecordBANK[slotid].frameAvail = 0;
    movieRecordBANK[slotid].imageBuffer = NULL;

    *moviehandle = slotid;

    // Increase counter:
    numMovieRecords++;

    // Compute basic movie properties - Duration and fps as well as image size:
    
    // Retrieve duration in seconds:
    fmt = GST_FORMAT_TIME;
    if (gst_element_query_duration(theMovie, &fmt, &length_format)) {
	// This returns nsecs, so convert to seconds:
    	movieRecordBANK[slotid].movieduration = (double) length_format / (double) 1e9;
	//printf("PTB-DEBUG: Duration of movie %i [%s] is %lf seconds.\n", slotid, moviename, movieRecordBANK[slotid].movieduration);
    } else {
	movieRecordBANK[slotid].movieduration = DBL_MAX;
	printf("PTB-WARNING: Could not query duration of movie %i [%s] in seconds. Returning infinity.\n", slotid, moviename);
    }

    // Assign expected framerate, assuming a linear spacing between frames:
    movieRecordBANK[slotid].fps = (double) rate1 / (double) rate2;
    //printf("PTB-DEBUG: Framerate fps of movie %i [%s] is %lf fps.\n", slotid, moviename, movieRecordBANK[slotid].fps);

    // Compute framecount from fps and duration:
    movieRecordBANK[slotid].nrframes = (int)(movieRecordBANK[slotid].fps * movieRecordBANK[slotid].movieduration + 0.5);
    //printf("PTB-DEBUG: Number of frames in movie %i [%s] is %i.\n", slotid, moviename, movieRecordBANK[slotid].nrframes);

    // Define size of images in movie:
    movieRecordBANK[slotid].width = width;
    movieRecordBANK[slotid].height = height;

    // Ready to rock!
    return;
}
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;

    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }

    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            int num, denom;
            num = (int) value;
            if(value != num) { // FIXME this supports only fractions x/1 and x/2
                num = (int) (value * 2);
                denom = 2;
            } else
                denom = 1;

            setFilter("framerate", GST_TYPE_FRACTION, num, denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break;
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    return false;
}
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    if(!isInited) {
//        printf("gst_init\n");
        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        isInited = true;
    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
        uri = realpath(filename, NULL);
        stream=false;
        if(uri) {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
        g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        if(!uridecodebin) {
            CV_WARN("GStreamer: Failed to create uridecodebin\n");
            close();
            return false;
        }
    }

    if(manualpipeline) {
        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }

        pipeline = uridecodebin;
    } else {
        pipeline = gst_pipeline_new (NULL);

        color = gst_element_factory_make("ffmpegcolorspace", NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);

    {
        GstCaps* caps;
        caps = gst_caps_new_simple("video/x-raw-rgb",
                                   "red_mask",   G_TYPE_INT, 0x0000FF,
                                   "green_mask", G_TYPE_INT, 0x00FF00,
                                   "blue_mask",  G_TYPE_INT, 0xFF0000,
                                   NULL);
        gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
        gst_caps_unref(caps);
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
            GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
        gst_object_unref(pipeline);
        return false;
    }



    handleMessage();

    __END__;

    return true;
}
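This is the GStreamer 0.10 code path, so frames are pulled as raw buffers rather than samples; a short sketch of the pulling side:

// 0.10-style pull, matching the RGB-mask caps negotiated above.
static void pull_one_frame(GstElement* sink)
{
    GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink)); // blocks; NULL on EOS
    if (buf) {
        // GST_BUFFER_DATA(buf) / GST_BUFFER_SIZE(buf) hold the pixel data.
        gst_buffer_unref(buf);
    }
}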
/*!
 * \brief CvCapture_GStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
 * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - an uri:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the uri or manual pipeline description, which assume
 *  a live source)
 *
 *  The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties
 *  is really slow if we need to restart the pipeline over and over again.
 *
 *  TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
 *  I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
 *
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    gst_initializer::init();

    bool file = false;
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    GstElementFactory * testfac;
    GstStateChangeReturn status;

    if (type == CV_CAP_GSTREAMER_V4L){
        testfac = gst_element_factory_find("v4lsrc");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4lsrc ! "COLOR_ELEM" ! appsink";
    }
    if (type == CV_CAP_GSTREAMER_V4L2){
        testfac = gst_element_factory_find("v4l2src");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4l2src ! "COLOR_ELEM" ! appsink";
    }


    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(uri)
            {
                file = true;
            }
            else
            {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin)
            {
                fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }

    if(manualpipeline)
    {
        GstIterator *it = NULL;
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));

        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                  sink = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);


        if (!sink){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#endif
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new(NULL);
        // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert)
        //automatically selects the correct colorspace conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                pipeline = NULL;
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            pipeline = NULL;
            return false;
        }
    }

    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "bpp",        G_TYPE_INT, 24,
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else
    // support 1 and 3 channel 8 bit data, as well as bayer (also  1 channel, 8bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    // For video files only: set pipeline to PAUSED state to get its duration
    if (file)
    {
        status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for status update
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage(pipeline);
            gst_object_unref(pipeline);
            pipeline = NULL;
            CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
            return false;
        }

        GstFormat format;

        format = GST_FORMAT_DEFAULT;
#if GST_VERSION_MAJOR == 0
        if(!gst_element_query_duration(sink, &format, &duration))
#else
        if(!gst_element_query_duration(sink, format, &duration))
#endif
        {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query duration of stream");
            duration = -1;
            return true;
        }
    }
    else
    {
        duration = -1;
    }

    __END__;

    return true;
}
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters are not
    // specified, let's set them manually to prevent very huge images
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v42lsrc = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v42lsrc = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v42lsrc == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link elements
    if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v42lsrc);

        return false;
    }
    gst_caps_unref(caps_v42lsrc);

    // link elements
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    int width, height;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<float>(num) / denom;
    end = false;

    return true;
}
bool GStreamerWrapper::open( std::string strFilename, bool bGenerateVideoBuffer, bool bGenerateAudioBuffer )
{
	if( m_bFileIsOpen )
	{
		stop();
		close();
	}

	// init property variables
	m_iNumVideoStreams = 0;
	m_iNumAudioStreams = 0;
	m_iCurrentVideoStream = 0;
	m_iCurrentAudioStream = 0;
	m_iWidth = m_iHeight = 0;
	m_iCurrentFrameNumber = 0;		// set to invalid, as it is not decoded yet
	m_dCurrentTimeInMs = 0;			// set to invalid, as it is not decoded yet
	m_bIsAudioSigned = false;
	m_bIsNewVideoFrame = false;
	m_iNumAudioChannels = 0;
	m_iAudioSampleRate = 0;
	m_iAudioBufferSize = 0;
	m_iAudioWidth = 0;
	m_AudioEndianness = LITTLE_ENDIAN;
	m_fFps = 0;
	m_dDurationInMs = 0;
	m_iNumberOfFrames = 0;

	m_fVolume = 1.0f;
	m_fSpeed = 1.0f;
	m_PlayDirection = FORWARD;
	m_CurrentPlayState = NOT_INITIALIZED;
	m_LoopMode = LOOP;
	m_strFilename = strFilename;

#ifdef THREADED_MESSAGE_HANDLER
		m_MsgHandlingThread = std::thread( std::bind( threadedMessageHandler, this ) );
#endif


	////////////////////////////////////////////////////////////////////////// PIPELINE
	// Init main pipeline --> playbin2
	m_GstPipeline = gst_element_factory_make( "playbin2", "pipeline" );

	// Check and re-arrange filename string
	if ( strFilename.find( "file:/", 0 ) == std::string::npos &&
		 strFilename.find( "file:///", 0 ) == std::string::npos &&
		 strFilename.find( "http://", 0 ) == std::string::npos )
	{
		strFilename = "file:/" + strFilename;
	}

	// Open Uri
	g_object_set( m_GstPipeline, "uri", strFilename.c_str(), NULL );


	////////////////////////////////////////////////////////////////////////// VIDEO SINK
	// Extract and Config Video Sink
	if ( bGenerateVideoBuffer )
	{
		// Create the video appsink and configure it
		m_GstVideoSink = gst_element_factory_make( "appsink", "videosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstVideoSink ), true );
		gst_app_sink_set_max_buffers( GST_APP_SINK( m_GstVideoSink ), 8 );
		gst_app_sink_set_drop( GST_APP_SINK( m_GstVideoSink ),true );
		gst_base_sink_set_max_lateness( GST_BASE_SINK( m_GstVideoSink ), -1);

		// Set some fix caps for the video sink
		// It would seem that GStreamer then tries to transform any incoming video stream according to these caps
		GstCaps* caps = gst_caps_new_simple( "video/x-raw-rgb",
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL );


		gst_app_sink_set_caps( GST_APP_SINK( m_GstVideoSink ), caps );
		gst_caps_unref( caps );

		// Set the configured video appsink to the main pipeline
		g_object_set( m_GstPipeline, "video-sink", m_GstVideoSink, (void*)NULL );
		// Tell the video appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstVideoSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Video Sink callback methods
		m_GstVideoSinkCallbacks.eos = &GStreamerWrapper::onEosFromVideoSource;
		m_GstVideoSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromVideoSource;
		m_GstVideoSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromVideoSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstVideoSink ), &m_GstVideoSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use the DirectDraw sink as playback plugin if on Windows; needed for features like play direction and playback speed to work correctly
		GstElement* videoSink = gst_element_factory_make( "directdrawsink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#elif defined LINUX
		GstElement* videoSink = gst_element_factory_make( "xvimagesink", NULL );    //possible alternatives: ximagesink (no (gpu) fancy stuff) or better: cluttersink
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* videoSink = gst_element_factory_make( "osxvideosink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// AUDIO SINK
	// Extract and config Audio Sink
	if ( bGenerateAudioBuffer )
	{
		// Create and configure audio appsink
		m_GstAudioSink = gst_element_factory_make( "appsink", "audiosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstAudioSink ), true );
		// Set the configured audio appsink to the main pipeline
		g_object_set( m_GstPipeline, "audio-sink", m_GstAudioSink, (void*)NULL );
		// Tell the video appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstAudioSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Audio Sink callback methods
		m_GstAudioSinkCallbacks.eos = &GStreamerWrapper::onEosFromAudioSource;
		m_GstAudioSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromAudioSource;
		m_GstAudioSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromAudioSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstAudioSink ), &m_GstAudioSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct sound plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* audioSink = gst_element_factory_make( "directsoundsink", NULL );
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#elif defined LINUX
		GstElement* audioSink = gst_element_factory_make( "pulsesink", NULL );  //alternative: alsasink
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#else // Use Mac OS X plugin otherwise
		GstElement* audioSink = gst_element_factory_make( "osxaudiosink", NULL );
		g_object_set ( m_GstPipeline,"audio-sink", audioSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// BUS
	// Set GstBus
	m_GstBus = gst_pipeline_get_bus( GST_PIPELINE( m_GstPipeline ) );

	if ( m_GstPipeline != NULL )
	{
//just add this callback for threaded message handling
#ifdef THREADED_MESSAGE_HANDLER
		gst_bus_add_watch (m_GstBus, onHandleGstMessages, this );
#endif
		// We need to stream the file a little bit in order to be able to retrieve information from it
		gst_element_set_state( m_GstPipeline, GST_STATE_READY );
		gst_element_set_state( m_GstPipeline, GST_STATE_PAUSED );

		// For some reason this is needed in order to gather video information such as size, framerate etc ...
		GstState state;
		gst_element_get_state( m_GstPipeline, &state, NULL, 2 * GST_SECOND );
		m_CurrentPlayState = OPENED;
	}

	// Retrieve and store all relevant Media Information
	retrieveVideoInfo();

	if( !hasVideo() && !hasAudio() )	// is a valid multimedia file?
	{
		close();
		return false;
	}

	// Print Media Info
	printMediaFileInfo();

	// TODO: Check if everything was initialized correctly
	// A file has been opened
	m_bFileIsOpen = true;

	return true;
}
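retrieveVideoInfo() is called above but not shown; a hedged sketch of what it could query (GStreamer 0.10 signatures, matching the playbin2 usage here; the body is an assumption about the real method):

void GStreamerWrapper::retrieveVideoInfo()
{
	// Duration in nanoseconds -> milliseconds
	GstFormat fmt = GST_FORMAT_TIME;
	gint64 duration = 0;
	if ( gst_element_query_duration( m_GstPipeline, &fmt, &duration ) )
		m_dDurationInMs = (double)duration / GST_MSECOND;

	// Stream counts exposed by playbin2
	g_object_get( m_GstPipeline, "n-video", &m_iNumVideoStreams,
				  "n-audio", &m_iNumAudioStreams, NULL );
}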
bool GStreamerFramesReceiver::LoadVideo(char * URL)
{
	GstStateChangeReturn res;

	/* Initialize GStreamer */
	gst_init(NULL, NULL);

	/* Build the pipeline */
	GError *error = NULL;
	char * init_str = g_strdup_printf("rtspsrc location=%s latency=1000 drop-on-latency=false ! queue ! rtph264depay ! queue2 ! avdec_h264 ! queue2 ! appsink name=mysink", URL);
	pipeline = gst_parse_launch(init_str, &error);
	g_free(init_str);
	
	if (error)
	{
		gchar * message = g_strdup_printf("Unable to build pipeline: %s", error->message);
		g_printerr("%s\n", message);
		g_clear_error(&error);
		g_free(message);
	}

	sink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");

	/* Instruct appsink to drop old buffers when the maximum amount of queued buffers is reached. */
	gst_app_sink_set_drop(GST_APP_SINK(sink), true);

	/* Set the maximum amount of buffers that can be queued in appsink.
	 * After this amount of buffers are queued in appsink, any more buffers
	 * will block upstream elements until a sample is pulled from appsink.
	 */
	gst_app_sink_set_max_buffers(GST_APP_SINK(sink), 1);		// number of queued received buffers in appsink before updating new frame
	g_object_set(G_OBJECT(sink), "sync", TRUE, NULL);			// GST_OBJECT

	// Registering callbacks to appsink element
	GstAppSinkCallbacks callbacks = { on_eos, new_preroll, new_buffer, NULL };
	gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, NULL);

	res = gst_element_set_state (pipeline, GST_STATE_PLAYING);

	if (res == GST_STATE_CHANGE_FAILURE)
	{
		g_printerr ("Unable to set the pipeline to the playing state.\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	else if (res == GST_STATE_CHANGE_NO_PREROLL)
	{
		g_print ("live sources not supported yet\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	else if (res == GST_STATE_CHANGE_ASYNC)
	{
		// can happen when buffering occurs
		GstState current, pending;
		res = gst_element_get_state(GST_ELEMENT(pipeline), &current, &pending, GST_CLOCK_TIME_NONE);
		if(res == GST_STATE_CHANGE_FAILURE || res == GST_STATE_CHANGE_ASYNC)
		{
			g_printerr ("Unable to set the pipeline to the playing state.\n");
			gst_object_unref (pipeline);
			pipeline = NULL;
			return false;
		}
	}

	bool isFrameOK = false;

	/* get the preroll buffer from appsink, this block untils appsink really prerolls */
	GstSample * sample;
	g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

	if (sample)
	{
		/* get the snapshot buffer format now. We set the caps on the appsink so
		 * that it can only be an rgb buffer. The only thing we have not specified
		 * on the caps is the height, which is dependent on the pixel-aspect-ratio
		 * of the source material
		 */
		GstCaps *caps = gst_sample_get_caps(sample);
		int width, height;
		PixelFormat pixelFormat;
		isFrameOK = ExtractImageParams(caps, width, height, pixelFormat);
		gst_sample_unref (sample);
	}

	if (!isFrameOK)
	{
		g_printerr ("Unable to get the snapshot buffer format.\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	
	mainLoopThread = g_thread_new("mainLoopThread", MainLoopThreadFunction, this);

	return true;
}
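ExtractImageParams() is referenced above but not shown; a sketch of reading the negotiated geometry from the preroll caps (mapping the format string onto the project's PixelFormat enum is left as an assumption):

static bool ExtractImageParams(GstCaps * caps, int & width, int & height, PixelFormat & pixelFormat)
{
	if (!caps)
		return false;
	const GstStructure * s = gst_caps_get_structure(caps, 0);
	if (!gst_structure_get_int(s, "width", &width) ||
	    !gst_structure_get_int(s, "height", &height))
		return false;
	// avdec_h264 typically negotiates planar YUV, e.g. "I420".
	const gchar * format = gst_structure_get_string(s, "format");
	(void)format; // ... map onto PixelFormat here ...
	(void)pixelFormat;
	return true;
}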