Example #1
/*!
 * \brief OpenIMAJCapGStreamer::setFilter
 * \param prop the property name
 * \param type glib property type
 * \param v1 the value
 * \param v2 second value if the property type requires it, else NULL
 * Filter the output formats by setting appsink caps properties
 */
void OpenIMAJCapGStreamer::setFilter(const char *prop, GType type, int v1, int v2)
{
    if(!caps || !( GST_IS_CAPS (caps) ))
    {
        if(type == G_TYPE_INT)
        {
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
        }
        else
        {
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
        }
    }
    else
    {
        if (! gst_caps_is_writable(caps))
            caps = gst_caps_make_writable (caps);
        if(type == G_TYPE_INT){
            gst_caps_set_simple(caps, prop, type, v1, NULL);
        }else{
            gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
        }
    }
    
    caps = gst_caps_fixate(caps);
    
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
}
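For context, a hypothetical caller might constrain the capture like this (the property names are standard GStreamer caps fields; "framerate" is a GST_TYPE_FRACTION, so both v1 and v2 are used):

// Sketch of assumed usage; cap is a hypothetical OpenIMAJCapGStreamer*.
cap->setFilter("width", G_TYPE_INT, 640, 0);
cap->setFilter("height", G_TYPE_INT, 480, 0);
cap->setFilter("framerate", GST_TYPE_FRACTION, 30, 1);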
Example #2
static void
gst_app_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSink *appsink = GST_APP_SINK_CAST (object);

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_sink_set_caps (appsink, gst_value_get_caps (value));
      break;
    case PROP_EMIT_SIGNALS:
      gst_app_sink_set_emit_signals (appsink, g_value_get_boolean (value));
      break;
    case PROP_MAX_BUFFERS:
      gst_app_sink_set_max_buffers (appsink, g_value_get_uint (value));
      break;
    case PROP_DROP:
      gst_app_sink_set_drop (appsink, g_value_get_boolean (value));
      break;
    case PROP_WAIT_ON_EOS:
      gst_app_sink_set_wait_on_eos (appsink, g_value_get_boolean (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
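Because these are ordinary GObject properties, the same setters can also be reached generically; a minimal sketch, assuming an existing appsink element and a prepared GstCaps* caps:

/* Each g_object_set() key below dispatches into the switch above. */
g_object_set (G_OBJECT (appsink),
    "caps", caps,
    "emit-signals", FALSE,
    "max-buffers", 1,
    "drop", TRUE,
    NULL);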
Example #3
void ApplicationSink::setCaps(const CapsPtr & caps)
{
    d->lazyConstruct(this);
    if (d->appSink()) {
        gst_app_sink_set_caps(d->appSink(), caps);
    }
}
Example #4
void AudioSourceProviderGStreamer::handleNewDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads++;

    if (m_deinterleaveSourcePads > 2) {
        g_warning("The AudioSourceProvider supports only mono and stereo audio. Silencing out this new channel.");
        GstElement* queue = gst_element_factory_make("queue", 0);
        GstElement* sink = gst_element_factory_make("fakesink", 0);
        g_object_set(sink, "async", FALSE, nullptr);
        gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

        GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
        gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

        GQuark quark = g_quark_from_static_string("peer");
        g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());
        gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);
        gst_element_sync_state_with_parent(queue);
        gst_element_sync_state_with_parent(sink);
        return;
    }

    // A new pad for a planar channel was added in deinterleave. Plug
    // in an appsink so we can pull the data from each
    // channel. Pipeline looks like:
    // ... deinterleave ! queue ! appsink.
    GstElement* queue = gst_element_factory_make("queue", 0);
    GstElement* sink = gst_element_factory_make("appsink", 0);

    GstAppSinkCallbacks callbacks;
    callbacks.eos = 0;
    callbacks.new_preroll = 0;
    callbacks.new_sample = onAppsinkNewBufferCallback;
    gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, 0);

    g_object_set(sink, "async", FALSE, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(gSampleBitRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr));

    gst_app_sink_set_caps(GST_APP_SINK(sink), caps.get());

    gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
    gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

    GQuark quark = g_quark_from_static_string("peer");
    g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());

    gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    sinkPad = adoptGRef(gst_element_get_static_pad(sink, "sink"));
    gst_pad_add_probe(sinkPad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, onAppsinkFlushCallback, this, nullptr);

    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(sink);
}
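onAppsinkFlushCallback above is registered as a pad probe; a minimal sketch of its shape (the body is an assumption, the real WebKit handler resets the provider's queued audio):

static GstPadProbeReturn onAppsinkFlushCallback(GstPad*, GstPadProbeInfo* info, gpointer userData)
{
    // Only react once flushing has finished; this body is assumed.
    if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) == GST_EVENT_FLUSH_STOP) {
        auto* provider = static_cast<AudioSourceProviderGStreamer*>(userData);
        (void)provider; // ... clear any buffered samples here ...
    }
    return GST_PAD_PROBE_OK;
}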
Example #5
/*!
 * \brief OpenIMAJCapGStreamer::removeFilter
 * \param filter filter to remove
 * remove the specified filter from the appsink template caps
 */
void OpenIMAJCapGStreamer::removeFilter(const char *filter)
{
    if(!caps)
        return;
    
    if (! gst_caps_is_writable(caps))
        caps = gst_caps_make_writable (caps);
    
    GstStructure *s = gst_caps_get_structure(caps, 0);
    gst_structure_remove_field(s, filter);
    
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
}
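A hypothetical caller pairing this with the setFilter method from Example #1:

// Assumed usage: undo an earlier framerate constraint so the caps renegotiate.
cap->setFilter("framerate", GST_TYPE_FRACTION, 30, 1);
// ... later ...
cap->removeFilter("framerate");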
Example #6
/*!
 * \brief CvCapture_GStreamer::removeFilter
 * \param filter filter to remove
 * remove the specified filter from the appsink template caps
 */
void CvCapture_GStreamer::removeFilter(const char *filter)
{
    if(!caps)
        return;

#if GST_VERSION_MAJOR > 0
    if (! gst_caps_is_writable(caps))
        caps = gst_caps_make_writable (caps);
#endif

    GstStructure *s = gst_caps_get_structure(caps, 0);
    gst_structure_remove_field(s, filter);

    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
}
Example #7
bool ofGstUtils::loadMovie(string name){
	bpp 				= 24;
	bLoaded      		= false;
	bPaused 			= true;
	speed 				= 1.0f;
	bHavePixelsChanged 	= false;
	if( name.find( "://",0 ) == string::npos){
		name 			= "file://"+ofToDataPath(name,true);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	gstData.loop		= g_main_loop_new (NULL, FALSE);



	gstPipeline = gst_element_factory_make("playbin","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	gstSink = gst_element_factory_make("appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb",
										"bpp", G_TYPE_INT, 24,
										//"depth", G_TYPE_INT, 24,
										/*"endianness",G_TYPE_INT,4321,
										"red_mask",G_TYPE_INT,0xff0000,
										"green_mask",G_TYPE_INT,0x00ff00,
										"blue_mask",G_TYPE_INT,0x0000ff,*/


										NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), false);

	g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);


	GstElement *audioSink = gst_element_factory_make("gconfaudiosink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

	return startPipeline();

}
Example #8
static void
gst_app_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSink *appsink = GST_APP_SINK (object);

  //printf("appsinksetproperty\n");

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_sink_set_caps (appsink, gst_value_get_caps (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #9
GstElement *MediaPlayer::createVideoSink()
{
    GstElement * sink = createElement("appsink", "videosink");

    if ( !sink )
        return 0;

    // Set the caps - so far we only want our image to be RGB
    GstCaps *sinkCaps = gst_caps_new_simple( "video/x-raw", "format", G_TYPE_STRING, "BGRA", NULL );
    gst_app_sink_set_caps( GST_APP_SINK( sink ), sinkCaps );
    gst_caps_unref(sinkCaps);

    // Set up the callbacks
    GstAppSinkCallbacks callbacks = { 0, 0, 0, 0, 0 };
    callbacks.new_sample = cb_new_sample;

    gst_app_sink_set_callbacks( GST_APP_SINK(sink), &callbacks, this, NULL );
    return sink;
}
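A minimal sketch of what a new_sample handler such as cb_new_sample typically does (the body is assumed; only the GstAppSink pull/map calls are standard 1.x API):

static GstFlowReturn cb_new_sample( GstAppSink *sink, gpointer user_data )
{
    GstSample *sample = gst_app_sink_pull_sample( sink );
    if ( !sample )
        return GST_FLOW_EOS;

    GstBuffer *buffer = gst_sample_get_buffer( sample );
    GstMapInfo map;
    if ( gst_buffer_map( buffer, &map, GST_MAP_READ ) )
    {
        // map.data now holds one BGRA frame for the player (assumed handling)
        gst_buffer_unmap( buffer, &map );
    }

    gst_sample_unref( sample );
    return GST_FLOW_OK;
}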
Example #10
/*!
 * \brief CvCapture_GStreamer::setFilter
 * \param prop the property name
 * \param type glib property type
 * \param v1 the value
 * \param v2 second value if the property type requires it, else NULL
 * Filter the output formats by setting appsink caps properties
 */
void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
{
    //printf("GStreamer: setFilter \n");
    if(!caps || !( GST_IS_CAPS (caps) ))
    {
        if(type == G_TYPE_INT)
        {
#if GST_VERSION_MAJOR == 0
            caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
#else
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
#endif
        }
        else
        {
#if GST_VERSION_MAJOR == 0
            caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
#else
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
#endif
        }
    }
    else
    {
#if GST_VERSION_MAJOR > 0
        if (! gst_caps_is_writable(caps))
            caps = gst_caps_make_writable (caps);
#endif
        if(type == G_TYPE_INT){
            gst_caps_set_simple(caps, prop, type, v1, NULL);
        }else{
            gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
        }
    }

#if GST_VERSION_MAJOR > 0
    caps = gst_caps_fixate(caps);
#endif

    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    //printf("filtering with %s\n", gst_caps_to_string(caps));
}
Example #11
/*
 *      PsychGSCreateMovie() -- Create a movie object.
 *
 *      This function tries to open a moviefile (with or without audio/video tracks)
 *      and create an associated movie object for it.
 *
 *      win = Pointer to window record of associated onscreen window.
 *      moviename = char* with the name of the moviefile.
 *      preloadSecs = How many seconds of the movie should be preloaded/prefetched into RAM at movie open time?
 *      moviehandle = handle to the new movie.
 */
void PsychGSCreateMovie(PsychWindowRecordType *win, const char* moviename, double preloadSecs, int* moviehandle)
{
    GstCaps                     *colorcaps;
    GstElement			*theMovie = NULL;
    GMainLoop			*MovieContext = NULL;
    GstBus			*bus = NULL;
    GstFormat			fmt;
    GstElement      *videosink;
    gint64			length_format;
    GstPad			*pad, *peerpad;
    const GstCaps		*caps;
    GstStructure		*str;
    gint			width,height;
    gint			rate1, rate2;
    int				i, slotid;
    GError			*error = NULL;
    char			movieLocation[FILENAME_MAX];
    psych_bool			trueValue = TRUE;
    char			msgerr[10000];
    char			errdesc[1000];
    psych_bool			printErrors;

    // Suppress output of error-messages if moviehandle == -1000. That means we
    // run in our own Posix-Thread, not in the Matlab-Thread. Printing via Matlab's
    // printing facilities would likely cause a terrible crash.
    printErrors = (*moviehandle == -1000) ? FALSE : TRUE;
    
    // Set movie handle to "failed" initially:
    *moviehandle = -1;

    // We start GStreamer only on first invocation.
    if (firsttime) {        
        // Initialize GStreamer: The routine is defined in PsychVideoCaptureSupportGStreamer.c
		PsychGSCheckInit("movie playback");
        firsttime = FALSE;
    }

    if (win && !PsychIsOnscreenWindow(win)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Provided windowPtr is not an onscreen window."); else return;
    }

    if (NULL == moviename) {
        if (printErrors) PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of moviename passed!"); else return;
    }

    if (numMovieRecords >= PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Search first free slot in movieRecordBANK:
    for (i=0; (i < PSYCH_MAX_MOVIES) && (movieRecordBANK[i].theMovie); i++);
    if (i>=PSYCH_MAX_MOVIES) {
        *moviehandle = -2;
        if (printErrors) PsychErrorExitMsg(PsychError_user, "Allowed maximum number of simultaneously open movies exceeded!"); else return;
    }

    // Slot slotid will contain the movie record for our new movie object:
    slotid=i;

    // Zero-out new record in moviebank:
    memset(&movieRecordBANK[slotid], 0, sizeof(PsychMovieRecordType));
    
    // Create name-string for moviename: If an URI qualifier is at the beginning,
    // we're fine and just pass the URI as-is. Otherwise we add the file:// URI prefix.
    if (strstr(moviename, "://") || ((strstr(moviename, "v4l") == moviename) && strstr(moviename, "//"))) {
	snprintf(movieLocation, sizeof(movieLocation)-1, "%s", moviename);
    } else {
	snprintf(movieLocation, sizeof(movieLocation)-1, "file:///%s", moviename);
    }
    strncpy(movieRecordBANK[slotid].movieLocation, movieLocation, FILENAME_MAX);
    strncpy(movieRecordBANK[slotid].movieName, moviename, FILENAME_MAX);

    // Create movie playback pipeline:
    theMovie = gst_element_factory_make ("playbin2", "ptbmovieplaybackpipeline");

    // Assign name of movie to play:
    g_object_set(G_OBJECT(theMovie), "uri", movieLocation, NULL);

    // Connect callback to about-to-finish signal: Signal is emitted as soon as
    // end of current playback iteration is approaching. The callback checks if
    // looped playback is requested. If so, it schedules a new playback iteration.
    g_signal_connect(G_OBJECT(theMovie), "about-to-finish", G_CALLBACK(PsychMovieAboutToFinishCB), &(movieRecordBANK[slotid]));

    // Assign message context, message bus and message callback for
    // the pipeline to report events and state changes, errors etc.:    
    MovieContext = g_main_loop_new (NULL, FALSE);
    movieRecordBANK[slotid].MovieContext = MovieContext;
    bus = gst_pipeline_get_bus(GST_PIPELINE(theMovie));
    // Didn't work: g_signal_connect (G_OBJECT(bus), "message::error", G_CALLBACK(PsychMessageErrorCB), NULL);
    //              g_signal_connect (G_OBJECT(bus), "message::warning", G_CALLBACK(PsychMessageErrorCB), NULL);
    gst_bus_add_watch(bus, PsychMovieBusCallback, &(movieRecordBANK[slotid]));
    gst_object_unref(bus);

    // Assign an appsink named "ptbsink0" as destination video-sink for
    // all video content. This allows us to get hold of the video frame buffers for
    // converting them into PTB OpenGL textures:
    videosink = gst_element_factory_make ("appsink", "ptbsink0");
    if (!videosink) {
	printf("PTB-ERROR: Failed to create video-sink appsink ptbsink!\n");
	PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
	PsychErrorExitMsg(PsychError_system, "Opening the movie failed. Reason hopefully given above.");
    }

    movieRecordBANK[slotid].videosink = videosink;

    // Our OpenGL texture creation routine needs GL_BGRA8 data in GL_UNSIGNED_8_8_8_8_REV
    // format, but the pipeline usually delivers YUV data in planar format. Therefore we
    // need to perform colorspace/colorformat conversion. We build a little videobin
    // which consists of a ffmpegcolorspace converter plugin connected to our appsink
    // plugin which will deliver video data to us for conversion into textures.
    // The "sink" pad of the converter plugin is connected as the "sink" pad of our
    // videobin, and the videobin is connected to the video-sink output of the pipeline,
    // thereby receiving decoded video data. We place a videocaps filter inbetween the
    // converter and the appsink to enforce a color format conversion to the "colorcaps"
    // we need. colorcaps define the needed data format for efficient conversion into
    // a RGBA8 texture:
    colorcaps = gst_caps_new_simple (   "video/x-raw-rgb",
					"bpp", G_TYPE_INT, 32,
					"depth", G_TYPE_INT, 32,
					"alpha_mask", G_TYPE_INT, 0x000000FF,
					"red_mask", G_TYPE_INT,   0x0000FF00,
					"green_mask", G_TYPE_INT, 0x00FF0000,
					"blue_mask", G_TYPE_INT,  0xFF000000,
					NULL);

    /*
    // Old style method: Only left here for documentation to show how one can create
    // video sub-pipelines via bin's and connect them to each other via ghostpads: 

    GstElement *videobin = gst_bin_new ("video_output_bin");
    GstElement *videocon = gst_element_factory_make ("ffmpegcolorspace", "color_converter");
    gst_bin_add_many(GST_BIN(videobin), videocon, videosink, NULL);

    GstPad *ghostpad = gst_ghost_pad_new("Video_Ghostsink", gst_element_get_pad(videocon, "sink"));
    gst_element_add_pad(videobin, ghostpad);

    gst_element_link_filtered(videocon, videosink, colorcaps);

    // Assign our special videobin as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videobin, NULL);
    */

    // New style method: Leaves the freedom of choice of color converter (if any)
    // to the auto-plugger.

    // Assign 'colorcaps' as caps to our videosink. This marks the videosink so
    // that it can only receive video image data in the format defined by colorcaps,
    // i.e., a format that is easy to consume for OpenGL's texture creation on std.
    // gpu's. It is the job of the video pipeline's autoplugger to plug in proper
    // color & format conversion plugins to satisfy videosink's needs.
    gst_app_sink_set_caps(GST_APP_SINK(videosink), colorcaps);

    // Assign our special appsink 'videosink' as video-sink of the pipeline:
    g_object_set(G_OBJECT(theMovie), "video-sink", videosink, NULL);
    gst_caps_unref(colorcaps);

    // Get the pad from the final sink for probing width x height of movie frames and nominal framerate of movie:	
    pad = gst_element_get_pad(videosink, "sink");

    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    // Should we preroll / preload?	
    if ((preloadSecs > 0) || (preloadSecs == -1)) {
	// Preload / Preroll the pipeline:
	if (!PsychMoviePipelineSetState(theMovie, GST_STATE_PAUSED, 30.0)) {
		PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
		PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
	}
    } else {
	// Ready the pipeline:
	if (!PsychMoviePipelineSetState(theMovie, GST_STATE_READY, 30.0)) {
		PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, TRUE);
		PsychErrorExitMsg(PsychError_user, "In OpenMovie: Opening the movie failed. Reason given above.");
	}    
    }

    // Query number of available video and audio tracks in movie:
    g_object_get (G_OBJECT(theMovie),
               "n-video", &movieRecordBANK[slotid].nrVideoTracks,
               "n-audio", &movieRecordBANK[slotid].nrAudioTracks,
                NULL);

    // We need a valid onscreen window handle for real video playback:
    if ((NULL == win) && (movieRecordBANK[slotid].nrVideoTracks > 0)) {
        if (printErrors) PsychErrorExitMsg(PsychError_user, "No windowPtr to an onscreen window provided. Must do so for movies with videotrack!"); else return;
    }
 
    PsychGSProcessMovieContext(movieRecordBANK[slotid].MovieContext, FALSE);

    PsychInitMutex(&movieRecordBANK[slotid].mutex);
    PsychInitCondition(&movieRecordBANK[slotid].condition, NULL);

    if (oldstyle) {
	// Install the probe callback for reception of video frames from engine at the sink-pad itself:
	gst_pad_add_buffer_probe(pad, G_CALLBACK(PsychHaveVideoDataCallback), &(movieRecordBANK[slotid]));
    } else {
	// Install callbacks used by the videosink (appsink) to announce various events:
	gst_app_sink_set_callbacks(GST_APP_SINK(videosink), &videosinkCallbacks, &(movieRecordBANK[slotid]), PsychDestroyNotifyCallback);
    }

    // Drop frames if callback can't pull buffers fast enough:
    // This together with the max queue lengths of 1 allows to
    // maintain audio-video sync by framedropping if needed.
    gst_app_sink_set_drop(GST_APP_SINK(videosink), TRUE);

    // Only allow one queued buffer before dropping:
    gst_app_sink_set_max_buffers(GST_APP_SINK(videosink), 1);

    // Assign harmless initial settings for fps and frame size:
    rate1 = 0;
    rate2 = 1;
    width = height = 0;

    // Videotrack available?
    if (movieRecordBANK[slotid].nrVideoTracks > 0) {
	// Yes: Query size and framerate of movie:
	peerpad = gst_pad_get_peer(pad);
	caps=gst_pad_get_negotiated_caps(peerpad);
	if (caps) {
		str=gst_caps_get_structure(caps,0);

		/* Get some data about the frame */
		rate1 = 1; rate2 = 1;
		gst_structure_get_fraction(str, "pixel-aspect-ratio", &rate1, &rate2);
		movieRecordBANK[slotid].aspectRatio = (double) rate1 / (double) rate2;
		gst_structure_get_int(str,"width",&width);
		gst_structure_get_int(str,"height",&height);
		rate1 = 0; rate2 = 1;
		gst_structure_get_fraction(str, "framerate", &rate1, &rate2);

	 } else {
		printf("PTB-DEBUG: No frame info available after preroll.\n");	
	 }
    }

    if (strstr(moviename, "v4l2:")) {
	// Special case: The "movie" is actually a video4linux2 live source.
	// Need to make up parameters for now, so it works as a "movie":
	rate1 = 30; width = 640; height = 480;
	movieRecordBANK[slotid].nrVideoTracks = 1;

	// Ugliness at its best ;-)
	if (strstr(moviename, "320")) { width = 320; height = 240; };
    }

    // Release the pad:
    gst_object_unref(pad);

    // Assign new record in moviebank:
    movieRecordBANK[slotid].theMovie = theMovie;
    movieRecordBANK[slotid].loopflag = 0;
    movieRecordBANK[slotid].frameAvail = 0;
    movieRecordBANK[slotid].imageBuffer = NULL;

    *moviehandle = slotid;

    // Increase counter:
    numMovieRecords++;

    // Compute basic movie properties - Duration and fps as well as image size:
    
    // Retrieve duration in seconds:
    fmt = GST_FORMAT_TIME;
    if (gst_element_query_duration(theMovie, &fmt, &length_format)) {
	// This returns nsecs, so convert to seconds:
    	movieRecordBANK[slotid].movieduration = (double) length_format / (double) 1e9;
	//printf("PTB-DEBUG: Duration of movie %i [%s] is %lf seconds.\n", slotid, moviename, movieRecordBANK[slotid].movieduration);
    } else {
	movieRecordBANK[slotid].movieduration = DBL_MAX;
	printf("PTB-WARNING: Could not query duration of movie %i [%s] in seconds. Returning infinity.\n", slotid, moviename);
    }

    // Assign expected framerate, assuming a linear spacing between frames:
    movieRecordBANK[slotid].fps = (double) rate1 / (double) rate2;
    //printf("PTB-DEBUG: Framerate fps of movie %i [%s] is %lf fps.\n", slotid, moviename, movieRecordBANK[slotid].fps);

    // Compute framecount from fps and duration:
    movieRecordBANK[slotid].nrframes = (int)(movieRecordBANK[slotid].fps * movieRecordBANK[slotid].movieduration + 0.5);
    //printf("PTB-DEBUG: Number of frames in movie %i [%s] is %i.\n", slotid, moviename, movieRecordBANK[slotid].nrframes);

    // Define size of images in movie:
    movieRecordBANK[slotid].width = width;
    movieRecordBANK[slotid].height = height;

    // Ready to rock!
    return;
}
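videosinkCallbacks is defined elsewhere in this file; a plausible GStreamer 0.10-era wiring (the handler names are assumptions) would be:

/* GstAppSinkCallbacks in 0.10 carries eos, new_preroll and new_buffer. */
static GstAppSinkCallbacks videosinkCallbacks = {
    PsychEOSCallback,        /* eos (name assumed) */
    PsychNewPrerollCallback, /* new_preroll (name assumed) */
    PsychNewBufferCallback,  /* new_buffer (name assumed) */
};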
Example #12
  bool GSCam::init_stream()
  {
    if(!gst_is_initialized()) {
      // Initialize gstreamer pipeline
      ROS_DEBUG_STREAM( "Initializing gstreamer..." );
      gst_init(0,0);
    }

    ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() );

    GError *error = 0; // Assignment to zero is a gst requirement

    pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error);
    if (pipeline_ == NULL) {
      ROS_FATAL_STREAM( error->message );
      return false;
    }

    // Create RGB sink
    sink_ = gst_element_factory_make("appsink",NULL);
    GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ?
        gst_caps_new_simple("video/x-raw-rgb", NULL) :
        gst_caps_new_simple("video/x-raw-gray", NULL);
    gst_app_sink_set_caps(GST_APP_SINK(sink_), caps);
    gst_caps_unref(caps);

    // Set whether the sink should sync
    // Sometimes setting this to true can cause a large number of frames to be
    // dropped
    gst_base_sink_set_sync(
        GST_BASE_SINK(sink_),
        (sync_sink_) ? TRUE : FALSE);

    if(GST_IS_PIPELINE(pipeline_)) {
      GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC);
      g_assert(outpad);

      GstElement *outelement = gst_pad_get_parent_element(outpad);
      g_assert(outelement);
      gst_object_unref(outpad);

      if(!gst_bin_add(GST_BIN(pipeline_), sink_)) {
        ROS_FATAL("gst_bin_add() failed");
        gst_object_unref(outelement);
        gst_object_unref(pipeline_);
        return false;
      }

      if(!gst_element_link(outelement, sink_)) {
        ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
        gst_object_unref(outelement);
        gst_object_unref(pipeline_);
        return false;
      }

      gst_object_unref(outelement);
    } else {
      GstElement* launchpipe = pipeline_;
      pipeline_ = gst_pipeline_new(NULL);
      g_assert(pipeline_);

      gst_object_unparent(GST_OBJECT(launchpipe));

      gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL);

      if(!gst_element_link(launchpipe, sink_)) {
        ROS_FATAL("GStreamer: cannot link launchpipe -> sink");
        gst_object_unref(pipeline_);
        return false;
      }
    }

    gst_element_set_state(pipeline_, GST_STATE_PAUSED);

    if (gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
      ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration.");
      return false;
    } else {
      ROS_DEBUG_STREAM("Stream is PAUSED.");
    }

    // Create ROS camera interface
    camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1);

    return true;
  }
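For reference, gsconfig_ holds a gst_parse_launch() description; a hypothetical value for this 0.10-era node leaves the last element unlinked so init_stream() can attach the appsink:

// Hypothetical launch string for gsconfig_ (GStreamer 0.10 elements).
const char* gsconfig = "v4l2src device=/dev/video0 ! ffmpegcolorspace";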
Example #13
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    if(!isInited) {
//        printf("gst_init\n");
        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        isInited = true;
    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
        uri = realpath(filename, NULL);
        stream=false;
        if(uri) {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
        g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        if(!uridecodebin) {
            CV_WARN("GStreamer: Failed to create uridecodebin\n");
            close();
            return false;
        }
    }

    if(manualpipeline) {
        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }

        pipeline = uridecodebin;
    } else {
        pipeline = gst_pipeline_new (NULL);

        color = gst_element_factory_make("ffmpegcolorspace", NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);

    {
        GstCaps* caps;
        caps = gst_caps_new_simple("video/x-raw-rgb",
                                   "red_mask",   G_TYPE_INT, 0x0000FF,
                                   "green_mask", G_TYPE_INT, 0x00FF00,
                                   "blue_mask",  G_TYPE_INT, 0xFF0000,
                                   NULL);
        gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
        gst_caps_unref(caps);
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
            GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
        gst_object_unref(pipeline);
        return false;
    }



    handleMessage();

    __END__;

    return true;
}
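After this setup, frames can be pulled synchronously; a minimal sketch using the GStreamer 0.10 appsink API that matches the caps set above:

// Assumed grab step (GStreamer 0.10): blocks until one RGB frame arrives.
GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
if (buf) {
    // GST_BUFFER_DATA(buf) / GST_BUFFER_SIZE(buf) expose the raw RGB bytes
    gst_buffer_unref(buf);
}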
Example #14
static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
{
	CvCapture_GStreamer *capture = 0;
	CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

	__BEGIN__;

//	teststreamer(filename);

//	return 0;

	if(!isInited) {
		printf("gst_init\n");
		gst_init (NULL, NULL);

// according to the documentation this is the way to register a plugin now
// unfortunately, it has not propagated into my distribution yet...
// 		gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
// 			"opencv-appsink", "Element application sink",
// 			"0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
// 			"http://opencvlibrary.sourceforge.net/");

		isInited = true;
	}

	const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
//	printf("entered capturecreator %s\n", sourcetypes[type]);

	GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
	if(!source)
		return 0;

	if(type == CV_CAP_GSTREAMER_FILE)
		g_object_set(G_OBJECT(source), "location", filename, NULL);

	GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);

	GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
//	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
//	g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);

	GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);

	GstElement *pipeline = gst_pipeline_new (NULL);

	gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);

//	printf("added many\n");

	switch(type) {
	case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
		caps = gst_caps_new_simple("video/x-raw-rgb",
					   "width", G_TYPE_INT, 640,
					   "height", G_TYPE_INT, 480,
					   "framerate", GST_TYPE_FRACTION, 30, 1,
					   NULL);
		if(!gst_element_link_filtered(source, decodebin, caps)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		gst_caps_unref(caps);
		break;
	case CV_CAP_GSTREAMER_V4L:
	case CV_CAP_GSTREAMER_1394:
	case CV_CAP_GSTREAMER_FILE:
		if(!gst_element_link(source, decodebin)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		break;
	}

	if(!gst_element_link(colour, sink)) {
		CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("linked, pausing\n");

	if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
	   GST_STATE_CHANGE_FAILURE) {
		CV_WARN("GStreamer: unable to set pipeline to paused\n");
//		icvHandleMessage(capture);
//		cvReleaseCapture((CvCapture **)(void *)&capture);
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("state now paused\n");

	// construct capture struct
	capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
	memset(capture, 0, sizeof(CvCapture_GStreamer));
	capture->type = type;
	capture->pipeline = pipeline;
	capture->source = source;
	capture->decodebin = decodebin;
	capture->colour = colour;
	capture->appsink = sink;

	icvHandleMessage(capture);

	OPENCV_ASSERT(capture,
                      "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");

//	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
//	printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));

	__END__;

	return capture;
}
Example #15
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters are not
    // specified, let's set them manually to prevent overly large images
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v42lsrc = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v42lsrc = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v42lsrc == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link elements
    if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v42lsrc);

        return false;
    }
    gst_caps_unref(caps_v42lsrc);

    // link elements
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    int width, height;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<float>(num) / denom;
    end = false;

    return true;
}
Example #16
bool GStreamerWrapper::open( std::string strFilename, bool bGenerateVideoBuffer, bool bGenerateAudioBuffer )
{
	if( m_bFileIsOpen )
	{
		stop();
		close();
	}

	// init property variables
	m_iNumVideoStreams = 0;
	m_iNumAudioStreams = 0;
	m_iCurrentVideoStream = 0;
	m_iCurrentAudioStream = 0;
	m_iWidth = m_iHeight = 0;
	m_iCurrentFrameNumber = 0;		// set to invalid, as it is not decoded yet
	m_dCurrentTimeInMs = 0;			// set to invalid, as it is not decoded yet
	m_bIsAudioSigned = false;
	m_bIsNewVideoFrame = false;
	m_iNumAudioChannels = 0;
	m_iAudioSampleRate = 0;
	m_iAudioBufferSize = 0;
	m_iAudioWidth = 0;
	m_AudioEndianness = LITTLE_ENDIAN;
	m_fFps = 0;
	m_dDurationInMs = 0;
	m_iNumberOfFrames = 0;

	m_fVolume = 1.0f;
	m_fSpeed = 1.0f;
	m_PlayDirection = FORWARD;
	m_CurrentPlayState = NOT_INITIALIZED;
	m_LoopMode = LOOP;
	m_strFilename = strFilename;

#ifdef THREADED_MESSAGE_HANDLER
		m_MsgHandlingThread = std::thread( std::bind( threadedMessageHandler, this ) );
#endif


	////////////////////////////////////////////////////////////////////////// PIPELINE
	// Init main pipeline --> playbin2
	m_GstPipeline = gst_element_factory_make( "playbin2", "pipeline" );

	// Check and re-arrange filename string
	if ( strFilename.find( "file:/", 0 ) == std::string::npos &&
		 strFilename.find( "file:///", 0 ) == std::string::npos &&
		 strFilename.find( "http://", 0 ) == std::string::npos )
	{
		strFilename = "file:/" + strFilename;
	}

	// Open Uri
	g_object_set( m_GstPipeline, "uri", strFilename.c_str(), NULL );


	////////////////////////////////////////////////////////////////////////// VIDEO SINK
	// Extract and Config Video Sink
	if ( bGenerateVideoBuffer )
	{
		// Create the video appsink and configure it
		m_GstVideoSink = gst_element_factory_make( "appsink", "videosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstVideoSink ), true );
		gst_app_sink_set_max_buffers( GST_APP_SINK( m_GstVideoSink ), 8 );
		gst_app_sink_set_drop( GST_APP_SINK( m_GstVideoSink ),true );
		gst_base_sink_set_max_lateness( GST_BASE_SINK( m_GstVideoSink ), -1);

		// Set some fixed caps for the video sink
		// It would seem that GStreamer then tries to transform any incoming video stream according to these caps
		GstCaps* caps = gst_caps_new_simple( "video/x-raw-rgb",
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL );


		gst_app_sink_set_caps( GST_APP_SINK( m_GstVideoSink ), caps );
		gst_caps_unref( caps );

		// Set the configured video appsink to the main pipeline
		g_object_set( m_GstPipeline, "video-sink", m_GstVideoSink, (void*)NULL );
		// Tell the video appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstVideoSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Video Sink callback methods
		m_GstVideoSinkCallbacks.eos = &GStreamerWrapper::onEosFromVideoSource;
		m_GstVideoSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromVideoSource;
		m_GstVideoSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromVideoSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstVideoSink ), &m_GstVideoSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct show as playback plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* videoSink = gst_element_factory_make( "directdrawsink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#elif defined LINUX
		GstElement* videoSink = gst_element_factory_make( "xvimagesink", NULL );    //possible alternatives: ximagesink (no (gpu) fancy stuff) or better: cluttersink
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* videoSink = gst_element_factory_make( "osxvideosink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// AUDIO SINK
	// Extract and config Audio Sink
	if ( bGenerateAudioBuffer )
	{
		// Create and configure audio appsink
		m_GstAudioSink = gst_element_factory_make( "appsink", "audiosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstAudioSink ), true );
		// Set the configured audio appsink to the main pipeline
		g_object_set( m_GstPipeline, "audio-sink", m_GstAudioSink, (void*)NULL );
		// Tell the audio appsink that it should not emit signals as the buffer retrieving is handled via callback methods
		g_object_set( m_GstAudioSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Audio Sink callback methods
		m_GstAudioSinkCallbacks.eos = &GStreamerWrapper::onEosFromAudioSource;
		m_GstAudioSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromAudioSource;
		m_GstAudioSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromAudioSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstAudioSink ), &m_GstAudioSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct sound plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* audioSink = gst_element_factory_make( "directsoundsink", NULL );
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#elif defined LINUX
		GstElement* audioSink = gst_element_factory_make( "pulsesink", NULL );  //alternative: alsasink
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* audioSink = gst_element_factory_make( "osxaudiosink", NULL );
		g_object_set ( m_GstPipeline,"audio-sink", audioSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// BUS
	// Set GstBus
	m_GstBus = gst_pipeline_get_bus( GST_PIPELINE( m_GstPipeline ) );

	if ( m_GstPipeline != NULL )
	{
//just add this callback for threaded message handling
#ifdef THREADED_MESSAGE_HANDLER
		gst_bus_add_watch (m_GstBus, onHandleGstMessages, this );
#endif
		// We need to stream the file a little bit in order to be able to retrieve information from it
		gst_element_set_state( m_GstPipeline, GST_STATE_READY );
		gst_element_set_state( m_GstPipeline, GST_STATE_PAUSED );

		// For some reason this is needed in order to gather video information such as size, framerate etc ...
		GstState state;
		gst_element_get_state( m_GstPipeline, &state, NULL, 2 * GST_SECOND );
		m_CurrentPlayState = OPENED;
	}

	// Retrieve and store all relevant Media Information
	retrieveVideoInfo();

	if( !hasVideo() && !hasAudio() )	// is a valid multimedia file?
	{
		close();
		return false;
	}

	// Print Media Info
	printMediaFileInfo();

	// TODO: Check if everything was initialized correctly
	// A file has been opened
	m_bFileIsOpen = true;

	return true;
}
Example #17
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	if( name.find( "://",0 ) == string::npos){
		name 			= "file://"+ofToDataPath(name,true);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	//GMainLoop* loop		=
	g_main_loop_new (NULL, FALSE);

	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	GstElement * gstSink = gst_element_factory_make("appsink", "sink");
	int bpp;
	string mime;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		mime = "video/x-raw-gray";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		mime = "video/x-raw-rgb";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
	case OF_PIXELS_BGRA:
		mime = "video/x-raw-rgb";
		bpp = 32;
		break;
	default:
		mime = "video/x-raw-rgb";
		bpp=24;
		break;
	}

	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
										"bpp", G_TYPE_INT, bpp,
										//"depth", G_TYPE_INT, 24,
										/*"endianness",G_TYPE_INT,4321,
										"red_mask",G_TYPE_INT,0xff0000,
										"green_mask",G_TYPE_INT,0x00ff00,
										"blue_mask",G_TYPE_INT,0x0000ff,*/


										NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), false);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);

	g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);


	GstElement *audioSink = gst_element_factory_make("gconfaudiosink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	if(!bIsStream) return allocate();
	else return true;
}
Example #18
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	if( name.find( "file://",0 ) != string::npos){
		bIsStream		= false;
	}else if( name.find( "://",0 ) == string::npos){
		name 			= "file://"+ofToDataPath(name,true);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLog(OF_LOG_VERBOSE,"loading "+name);

	ofGstUtils::startGstMainLoop();

	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	GstElement * gstSink = gst_element_factory_make("appsink", "app_sink");

	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
	gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink), 8);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
	gst_base_sink_set_max_lateness  (GST_BASE_SINK(gstSink), -1);

	int bpp;
	string mime;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		mime = "video/x-raw-gray";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		mime = "video/x-raw-rgb";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
	case OF_PIXELS_BGRA:
		mime = "video/x-raw-rgb";
		bpp = 32;
		break;
	default:
		mime = "video/x-raw-rgb";
		bpp=24;
		break;
	}

	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
										"bpp", G_TYPE_INT, bpp,
										"depth", G_TYPE_INT, 24,
										"endianness",G_TYPE_INT,4321,
										"red_mask",G_TYPE_INT,0xff0000,
										"green_mask",G_TYPE_INT,0x00ff00,
										"blue_mask",G_TYPE_INT,0x0000ff,
										"alpha_mask",G_TYPE_INT,0x000000ff,


										NULL);
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	if(threadAppSink){
		GstElement * appQueue = gst_element_factory_make("queue","appsink_queue");
		g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL);
		GstElement* appBin = gst_bin_new("app_bin");
		gst_bin_add(GST_BIN(appBin), appQueue);
		GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink");
		GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad);
		gst_object_unref(appQueuePad);
		gst_element_add_pad(appBin, ghostPad);

		gst_bin_add_many(GST_BIN(appBin), gstSink, NULL);
		gst_element_link_many(appQueue, gstSink, NULL);

		g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL);
	}else{
		g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);
	}

#ifdef TARGET_WIN32
	GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

#endif


	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	if(!bIsStream) return allocate(bpp);
	else return true;
}
Example #19
bool ofGstVideoPlayer::createPipeline(string name){
#ifndef OF_USE_GST_GL
#if GST_VERSION_MAJOR==0
	GstCaps *caps;
	int bpp;
	switch(internalPixelFormat){
	case OF_PIXELS_GRAY:
		bpp = 8;
		caps = gst_caps_new_simple("video/x-raw-gray",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 8,
			NULL);
		break;
	case OF_PIXELS_RGB:
		bpp = 24;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			NULL);
		break;
	case OF_PIXELS_RGBA:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 32,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff000000,
			"green_mask",G_TYPE_INT,0x00ff0000,
			"blue_mask",G_TYPE_INT,0x0000ff00,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL);
		break;
	case OF_PIXELS_BGRA:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 32,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0x0000ff00,
			"green_mask",G_TYPE_INT,0x00ff0000,
			"blue_mask",G_TYPE_INT,0xff000000,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL);
		break;
	default:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			NULL);
		break;
	}
#else
	string mime="video/x-raw";

	GstCaps *caps;
	if(internalPixelFormat==OF_PIXELS_NATIVE){
		caps = gst_caps_from_string((mime + ",format={RGBA,BGRA,RGB,BGR,RGB16,GRAY8,YV12,I420,NV12,NV21,YUY2}").c_str());
	}else{
		string format = ofGstVideoUtils::getGstFormatName(internalPixelFormat);
		caps = gst_caps_new_simple(mime.c_str(),
											"format", G_TYPE_STRING, format.c_str(),
											NULL);
	}
#endif

	#if GST_VERSION_MAJOR==0
		GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
	#else
		GstElement * gstPipeline = gst_element_factory_make("playbin","player");
	#endif
	g_object_ref_sink(gstPipeline);
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb without sync to clock
	GstElement * gstSink = gst_element_factory_make("appsink", "app_sink");
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	if(threadAppSink){
		GstElement * appQueue = gst_element_factory_make("queue","appsink_queue");
		g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL);
		GstElement* appBin = gst_bin_new("app_bin");
		gst_bin_add(GST_BIN(appBin), appQueue);
		GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink");
		GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad);
		gst_object_unref(appQueuePad);
		gst_element_add_pad(appBin, ghostPad);

		gst_bin_add(GST_BIN(appBin), gstSink);
		gst_element_link(appQueue, gstSink);

		g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL);
	}else{
		g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);
	}

	#ifdef TARGET_WIN32
		GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL);
		g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);
	#endif

	return videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);

#else
	/*auto gstPipeline = gst_parse_launch(("uridecodebin uri=" + name + " ! glcolorscale name=gl_filter ! appsink name=app_sink").c_str(),NULL);
	auto gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"app_sink");
	auto glfilter = gst_bin_get_by_name(GST_BIN(gstPipeline),"gl_filter");
	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	glXMakeCurrent (ofGetX11Display(), None, 0);
	glDisplay = (GstGLDisplay *)gst_gl_display_x11_new_with_display(ofGetX11Display());
	glContext = gst_gl_context_new_wrapped (glDisplay, (guintptr) ofGetGLXContext(),
	    		  GST_GL_PLATFORM_GLX, GST_GL_API_OPENGL);

	g_object_set (G_OBJECT (glfilter), "other-context", glContext, NULL);

	// FIXME: this seems to be the way to add the context in 1.4.5
	//
	// GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));
	// gst_bus_enable_sync_message_emission (bus);
	// g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), this);
	// gst_object_unref(bus);

	auto ret = videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	glXMakeCurrent (ofGetX11Display(), ofGetX11Window(), ofGetGLXContext());
	return ret;*/

	return videoUtils.setPipeline("uridecodebin uri=" + name,internalPixelFormat,bIsStream,-1,-1);
	//return videoUtils.setPipeline("filesrc location=" + name + " ! qtdemux ",internalPixelFormat,bIsStream,-1,-1);
#endif
}
Example #20
/*!
 * \brief CvCapture_GStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
 * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - an uri:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the uri or manual pipeline description, which
 *  assume a live source.)
 *
 *  The pipeline will only be started when the first frame is grabbed. Setting pipeline properties
 *  is really slow if we need to restart the pipeline over and over again.
 *
 *  TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
 *  I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
 *
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    gst_initializer::init();

    bool file = false;
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    GstElementFactory * testfac;
    GstStateChangeReturn status;

    if (type == CV_CAP_GSTREAMER_V4L){
        testfac = gst_element_factory_find("v4lsrc");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4lsrc ! "COLOR_ELEM" ! appsink";
    }
    if (type == CV_CAP_GSTREAMER_V4L2){
        testfac = gst_element_factory_find("v4l2src");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4l2src ! "COLOR_ELEM" ! appsink";
    }


    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(uri)
            {
                file = true;
            }
            else
            {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin)
            {
                fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }

    if(manualpipeline)
    {
        GstIterator *it = NULL;
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));

        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                  sink = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);


        if (!sink){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#endif
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new(NULL);
        // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert)
        // automatically selects the correct colorspace conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link source -> color\n");
                gst_object_unref(pipeline);
                pipeline = NULL;
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            pipeline = NULL;
            return false;
        }
    }

    //TODO: is a single buffer really enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
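    // with drop enabled (live streams), stale buffers are discarded once the
    // queue is full, so a grab always yields the most recent frame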
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "bpp",        G_TYPE_INT, 24,
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else
    // support 1- and 3-channel 8-bit data, as well as bayer (also 1 channel, 8 bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    // For video files only: set pipeline to PAUSED state to get its duration
    if (file)
    {
        status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for status update
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage(pipeline);
            gst_object_unref(pipeline);
            pipeline = NULL;
            CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
            return false;
        }

        GstFormat format;

        format = GST_FORMAT_DEFAULT;
#if GST_VERSION_MAJOR == 0
        if(!gst_element_query_duration(sink, &format, &duration))
#else
        if(!gst_element_query_duration(sink, format, &duration))
#endif
        {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query duration of stream");
            duration = -1;
            return true;
        }
    }
    else
    {
        duration = -1;
    }

    __END__;

    return true;
}
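From the public API this same code path is reached through cv::VideoCapture. Below is a minimal usage sketch, assuming an OpenCV build with the GStreamer backend; depending on the OpenCV version you may need to pass cv::CAP_GSTREAMER as the apiPreference argument explicitly.

#include <opencv2/opencv.hpp>

int main()
{
    // Each of these strings exercises a different branch of open() above:
    // a manual pipeline description, a URI, or a plain filesystem path.
    cv::VideoCapture cap("videotestsrc ! videoconvert ! appsink");
    // cv::VideoCapture cap("file:///path/to/video.avi");
    // cv::VideoCapture cap("/path/to/video.avi");
    if (!cap.isOpened())
        return 1;

    cv::Mat frame;
    while (cap.read(frame)) {
        cv::imshow("frame", frame);
        if (cv::waitKey(1) == 27) // Esc to quit
            break;
    }
    return 0;
}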
Example #21
0
int main(int argc, char** argv) {
	char *config = getenv("GSCAM_CONFIG");
	if (config == NULL) {
		std::cout << "Problem getting GSCAM_CONFIG variable." << std::endl;
		exit(-1);
	}

	gst_init(0,0);
	std::cout << "Gstreamer Version: " << gst_version_string() << std::endl;

	GError *error = 0; //assignment to zero is a gst requirement
	GstElement *pipeline = gst_parse_launch(config,&error);
	if (pipeline == NULL) {
		std::cout << error->message << std::endl;
		exit(-1);
	}
	GstElement * sink = gst_element_factory_make("appsink",NULL);
	GstCaps * caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
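	// 0.10-style caps (this example targets the 0.10 API throughout); on
	// GStreamer 1.x the equivalent would be gst_caps_from_string("video/x-raw,format=RGB")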
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
	gst_caps_unref(caps);

	gst_base_sink_set_sync(GST_BASE_SINK(sink), TRUE);

	if(GST_IS_PIPELINE(pipeline)) {
	    GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline), GST_PAD_SRC);
	    g_assert(outpad);
	    GstElement *outelement = gst_pad_get_parent_element(outpad);
	    g_assert(outelement);
	    gst_object_unref(outpad);


	    if(!gst_bin_add(GST_BIN(pipeline), sink)) {
		fprintf(stderr, "gst_bin_add() failed\n"); // TODO: do some unref
		gst_object_unref(outelement);
		gst_object_unref(pipeline);
		return -1;
	    }

	    if(!gst_element_link(outelement, sink)) {
		fprintf(stderr, "GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
		gst_object_unref(outelement);
		gst_object_unref(pipeline);
		return -1;
	    }

	    gst_object_unref(outelement);
	} else {
	    GstElement* launchpipe = pipeline;
	    pipeline = gst_pipeline_new(NULL);
	    g_assert(pipeline);

	    gst_object_unparent(GST_OBJECT(launchpipe));

	    gst_bin_add_many(GST_BIN(pipeline), launchpipe, sink, NULL);

	    if(!gst_element_link(launchpipe, sink)) {
		fprintf(stderr, "GStreamer: cannot link launchpipe -> sink\n");
		gst_object_unref(pipeline);
		return -1;
	    }
	}

	gst_element_set_state(pipeline, GST_STATE_PAUSED);

	if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
		std::cout << "Failed to PAUSE." << std::endl;
		exit(-1);
	} else {
		std::cout << "stream is PAUSED." << std::endl;
	}

	// We could probably do something with the camera name, check
	// errors or something, but at the moment, we don't care.
	std::string camera_name;
	if (camera_calibration_parsers::readCalibrationIni("../camera_parameters.txt", camera_name, camera_info)) {
	  ROS_INFO("Successfully read camera calibration.  Rerun camera calibrator if it is incorrect.");
	}
	else {
	  ROS_ERROR("No camera_parameters.txt file found.  Use default file if no other is available.");
	}

	ros::init(argc, argv, "gscam_publisher");
	ros::NodeHandle nh;

	int preroll;
	nh.param("brown/gscam/preroll", preroll, 0);
	if (preroll) {
		//The PAUSE, PLAY, PAUSE, PLAY cycle is to ensure proper pre-roll
		//I am told this is needed and am erring on the side of caution.
		gst_element_set_state(pipeline, GST_STATE_PLAYING);

		if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
			std::cout << "Failed to PLAY." << std::endl;
			exit(-1);
		} else {
			std::cout << "stream is PLAYING." << std::endl;
		}

		gst_element_set_state(pipeline, GST_STATE_PAUSED);

		if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
			std::cout << "Failed to PAUSE." << std::endl;
			exit(-1);
		} else {
			std::cout << "stream is PAUSED." << std::endl;
		}
	}

	image_transport::ImageTransport it(nh);
//	image_transport::CameraPublisher pub = it.advertiseCamera("gscam/image_raw", 1);

//------------------------------------
// Added by jschoi, 2012-08-13
	char topic_name[32];
	if(argc==1)
		sprintf(topic_name, "gscam/image_raw");
	else
		sprintf(topic_name,"%s",argv[1]);	// take the topic name from the first argument
//------------------------------------
	image_transport::CameraPublisher pub = it.advertiseCamera(topic_name, 1);

	ros::ServiceServer set_camera_info = nh.advertiseService("gscam/set_camera_info", setCameraInfo);

	std::cout << "Processing..." << std::endl;

	//processVideo
	rosPad = false;
	gstreamerPad = true;
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	while(nh.ok()) {
		// This should block until a new frame is available; this way we'll run at the
		// actual capture framerate of the device.
		GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
		if (!buf) break;

		GstPad* pad = gst_element_get_static_pad(sink, "sink");
		const GstCaps *caps = gst_pad_get_negotiated_caps(pad);
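		// gst_pad_get_negotiated_caps() is 0.10 API; on GStreamer 1.x this
		// would be gst_pad_get_current_caps(pad)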
		GstStructure *structure = gst_caps_get_structure(caps,0);
		gst_structure_get_int(structure,"width",&width);
		gst_structure_get_int(structure,"height",&height);

		sensor_msgs::Image msg;
		msg.width = width; 
		msg.height = height;
		msg.encoding = "rgb8";
		msg.is_bigendian = false;
		msg.step = width*3;
		msg.data.resize(width*height*3);
		std::copy(buf->data, buf->data+(width*height*3), msg.data.begin());

		pub.publish(msg, camera_info);

		gst_buffer_unref(buf);

		ros::spinOnce();

	}

	//close out
	std::cout << "\nquitting..." << std::endl;
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	return 0;
}
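This publisher is written against the GStreamer 0.10 API (gst_app_sink_pull_buffer, direct buf->data access). On GStreamer 1.x the same pull works with samples and explicit buffer mapping. A minimal sketch follows; pull_frame_1x and the out vector are illustrative names, and sink is assumed to be the appsink from the example.

#include <cstdint>
#include <vector>
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

// GStreamer 1.x counterpart of the pull loop above: pull a sample, read its
// caps for the frame geometry, then map the buffer to reach the pixel data.
static bool pull_frame_1x(GstElement *sink, std::vector<uint8_t> &out,
                          int &width, int &height)
{
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if (!sample)
        return false; // EOS or the pipeline was shut down

    if (GstCaps *caps = gst_sample_get_caps(sample)) {
        GstStructure *s = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(s, "width", &width);
        gst_structure_get_int(s, "height", &height);
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        out.assign(map.data, map.data + map.size); // raw frame bytes
        gst_buffer_unmap(buffer, &map);
    }
    gst_sample_unref(sample);
    return true;
}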
Example #22
0
/*!
 * \brief OpenIMAJCapGStreamer::open Open the given file with gstreamer
 * \param filename Filename, URI, or pipeline description to open
 * \return boolean. Specifies if opening was successful.
 *
 * A v4l2 URI (e.g. v4l2:///dev/video0) is turned directly into a source element,
 * rather than a uridecodebin, so that capture properties can still be set.
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - a URI:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, OpenIMAJCapGStreamer will not drop frames if the grabbing interval
 *  is larger than the frame period. (Unlike a URI or manual pipeline description, which are
 *  assumed to be live sources.)
 *
 *  The pipeline is only started when the first frame is grabbed, since setting pipeline
 *  properties is really slow if the pipeline has to be restarted over and over again.
 *
 */
bool OpenIMAJCapGStreamer::open(const char* filename )
{
    if(!isInited) {
        //FIXME: threadsafety
        gst_init (NULL, NULL);
        isInited = true;
    }

    
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    
    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                //close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }
    
    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);

            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        }
        g_free(protocol);
        
        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }
    
    if(manualpipeline)
    {
        GstIterator *it = NULL;
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));
        
        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;
        
        while (!done) {
            switch (gst_iterator_next (it, &value)) {
                case GST_ITERATOR_OK:
                    element = GST_ELEMENT (g_value_get_object (&value));
                    name = gst_element_get_name(element);
                    if (name){
                        if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                            sink = GST_ELEMENT ( gst_object_ref (element) );
                            done = TRUE;
                        }
                        g_free(name);
                    }
                    g_value_unset (&value);
                    
                    break;
                case GST_ITERATOR_RESYNC:
                    gst_iterator_resync (it);
                    break;
                case GST_ITERATOR_ERROR:
                case GST_ITERATOR_DONE:
                    done = TRUE;
                    break;
            }
        }
        gst_iterator_free (it);
        
        
        if (!sink){
            //ERROR(1, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
        
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);
        // videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
        // conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);
        
        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        
        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                //ERROR(1, "GStreamer: cannot link source -> color\n");
                gst_object_unref(pipeline);
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }
        
        if(!gst_element_link(color, sink)) {
            //ERROR(1, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }
    
    //TODO: is a single buffer really enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
    
    // support 1- and 3-channel 8-bit data, as well as bayer (also 1 channel, 8 bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    
    // We do not start the pipeline just yet: the user probably wants to set capture
    // properties first, so playback starts whenever the first frame is requested.
    
    return true;
}
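The v4l2 special case described in the comments above — building the source element directly from the URI instead of wrapping it in uridecodebin, so the appsink caps can still constrain the capture format — looks like this in isolation. A sketch; the device URI and helper name are illustrative.

#include <gst/gst.h>

// Build a v4l2 source straight from its URI (1.x signature with the
// trailing GError** argument), bypassing uridecodebin so capture
// properties can still be negotiated.
static GstElement *make_v4l2_source(const char *uri /* e.g. "v4l2:///dev/video0" */)
{
    GstElement *src = NULL;
    gchar *protocol = gst_uri_get_protocol(uri);
    if (protocol && g_strcmp0(protocol, "v4l2") == 0)
        src = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
    g_free(protocol);
    return src; // link to COLOR_ELEM ! appsink as in the example above
}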
Example #23
0
// ----------------------------------------------------------------------------
// Handle the "pad-added" message
void
GStreamerImportFileHandle::OnPadAdded(GstPad *pad)
{
   // Retrieve the stream caps...skip stream if unavailable
   GstCaps *caps = gst_pad_get_current_caps(pad);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve stream caps"));
      return;
   }

   // Get the caps structure...no need to release
   GstStructure *str = gst_caps_get_structure(caps, 0);
   if (!str)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve caps structure"));
      gst_caps_unref(caps);
      return;
   }

   // Only accept audio streams...no need to release
   const gchar *name = gst_structure_get_name(str);
   if (!g_strrstr(name, "audio"))
   {
      WARN(mPipeline, ("OnPadAdded: bypassing '%s' stream", name));
      gst_caps_unref(caps);
      return;
   }

   // Allocate a new stream context
   GStreamContext *c = g_new0(GStreamContext, 1);
   if (!c)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate stream context"));
      gst_caps_unref(caps);
      return;
   }

   // Set initial state
   c->mUse = true;

   // Always add it to the context list to keep the number of contexts
   // in sync with the number of streams
   g_mutex_lock(&mStreamsLock);
   g_ptr_array_add(mStreams, c);
   g_mutex_unlock(&mStreamsLock);

   // Need pointer to context during pad removal (pad-remove signal)
   SETCTX(pad, c);

   // Save the stream's start time and duration
   gst_pad_query_position(pad, GST_FORMAT_TIME, &c->mPosition);
   gst_pad_query_duration(pad, GST_FORMAT_TIME, &c->mDuration);

   // Retrieve the number of channels and validate
   gint channels = -1;
   gst_structure_get_int(str, "channels", &channels);
   if (channels <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: channel count is invalid %d", channels));
      gst_caps_unref(caps);
      return;
   }
   c->mNumChannels = channels;

   // Retrieve the sample rate and validate
   gint rate = -1;
   gst_structure_get_int(str, "rate", &rate);
   if (rate <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: sample rate is invalid %d", rate));
      gst_caps_unref(caps);
      return;
   }
   c->mSampleRate = (double) rate;

   c->mType = g_strdup(name);
   if (c->mType == NULL)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate audio type"));
      gst_caps_unref(caps);
      return;
   }

   // Done with capabilities
   gst_caps_unref(caps);

   // Create audioconvert element
   c->mConv = gst_element_factory_make("audioconvert", NULL);
   if (!c->mConv)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create audioconvert element"));
      return;
   }

   // Create appsink element
   c->mSink = gst_element_factory_make("appsink", NULL);
   if (!c->mSink)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create appsink element"));
      return;
   }
   SETCTX(c->mSink, c);

   // Set the appsink callbacks and add the context pointer
   gst_app_sink_set_callbacks(GST_APP_SINK(c->mSink), &AppSinkCallbacks, this, NULL);

   // Set the capabilities that we desire
   caps = gst_static_caps_get(&supportedCaps);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create static caps"));
      return;
   }
   gst_app_sink_set_caps(GST_APP_SINK(c->mSink), caps);
   gst_caps_unref(caps);

   // Do not sync to the clock...process as quickly as possible
   gst_base_sink_set_sync(GST_BASE_SINK(c->mSink), FALSE);

   // Don't drop buffers...allow queue to build unfettered
   gst_app_sink_set_drop(GST_APP_SINK(c->mSink), FALSE);

   // Add both elements to the pipeline
   gst_bin_add_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

   // Link them together
   if (!gst_element_link(c->mConv, c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: failed to link autioconvert and appsink"));
      return;
   }

   // Link the audioconvert sink pad to the decoder src pad
   GstPadLinkReturn ret = GST_PAD_LINK_OK;
   GstPad *convsink = gst_element_get_static_pad(c->mConv, "sink");
   if (convsink)
   {
      ret = gst_pad_link(pad, convsink);
      gst_object_unref(convsink);
   }
   if (!convsink || ret != GST_PAD_LINK_OK)
   {
      WARN(mPipeline, ("OnPadAdded: failed to link uridecodebin to audioconvert - %d", ret));
      return;
   }

   // Synchronize audioconvert state with parent
   if (!gst_element_sync_state_with_parent(c->mConv))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync audioconvert state"));
      return;
   }

   // Synchronize appsink state with parent
   if (!gst_element_sync_state_with_parent(c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync appaink state"));
      return;
   }

   return;
}
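Unlike the polling examples above, this importer registers callbacks on the appsink. For reference, here is a minimal sketch of such callback wiring on GStreamer 1.x; the handler body and helper names are illustrative, and the importer's actual AppSinkCallbacks table is defined elsewhere in its source.

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

// Runs on the streaming thread each time a buffer reaches the appsink.
static GstFlowReturn on_new_sample(GstAppSink *sink, gpointer /*user_data*/)
{
    GstSample *sample = gst_app_sink_pull_sample(sink);
    if (!sample)
        return GST_FLOW_EOS;
    // ... process gst_sample_get_buffer(sample) here ...
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

static void wire_callbacks(GstElement *appsink, gpointer user_data)
{
    GstAppSinkCallbacks callbacks = {}; // eos / new_preroll left unset
    callbacks.new_sample = on_new_sample;
    gst_app_sink_set_callbacks(GST_APP_SINK(appsink), &callbacks,
                               user_data, NULL /* no destroy notify */);
}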