Beispiel #1
0
/* Instance init: creates the element's pads from its templates, installs the
 * pad functions, and puts the frame store into its initial (flushing) state. */
static void
gst_frame_store_init (GstFrameStore * filter, GstFrameStoreClass * klass)
{
  /* Instantiate every pad declared in the class's pad templates. */
  gst_element_create_all_pads (GST_ELEMENT(filter));

  filter->srcpad = gst_element_get_pad (GST_ELEMENT(filter), "src");

  gst_pad_set_link_function (filter->srcpad, gst_frame_store_link_src);
  gst_pad_set_getcaps_function (filter->srcpad, gst_frame_store_getcaps);

  filter->sinkpad = gst_element_get_pad (GST_ELEMENT(filter), "sink");

  gst_pad_set_chain_function (filter->sinkpad, gst_frame_store_chain);
  gst_pad_set_event_function (filter->sinkpad, gst_frame_store_sink_event);
  gst_pad_set_getcaps_function (filter->sinkpad, gst_frame_store_getcaps);

  gst_frame_store_reset (filter);

  /* Storage window for buffered frames: range_size slots, zeroed (NULL). */
  filter->range_offset = 0;
  filter->range_size = 10;
  filter->frames = g_malloc0(sizeof(GstBuffer*)*filter->range_size);
  filter->frame_number = 0;
  filter->pushed_frame_number = -1; /* nothing pushed downstream yet */
  filter->need_newsegment = TRUE;

  filter->cond = g_cond_new ();
  filter->lock = g_mutex_new ();
  /* WRONG_STATE until the pads are activated. */
  filter->srcresult = GST_FLOW_WRONG_STATE;
}
Beispiel #2
0
/* Small caps test: links identity ! fakesink through an audio/x-raw-int
 * filter, then checks that identity's sink pad reports non-ANY caps and
 * that get_allowed_caps returns NULL while not negotiated.
 * Fixes: the original leaked a pad reference per gst_element_get_pad call,
 * the caps from gst_pad_get_caps, and the strings from gst_caps_to_string. */
gint
main (gint argc, gchar ** argv)
{
  GstCaps *caps;
  GstElement *sink, *identity;
  GstElement *pipeline;
  GstPad *pad;
  gchar *caps_str;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  identity = gst_element_factory_make ("identity", NULL);
  g_assert (identity);
  sink = gst_element_factory_make ("fakesink", NULL);
  g_assert (sink);
  gst_bin_add_many (GST_BIN (pipeline), identity, sink, NULL);
  gst_element_link_filtered (identity, sink,
      gst_caps_new_simple ("audio/x-raw-int", NULL));

  /* fetch the pad once instead of leaking one reference per lookup */
  pad = gst_element_get_pad (identity, "sink");

  caps = gst_pad_get_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("caps:         %s\n", caps_str);
  g_free (caps_str);      /* gst_caps_to_string returns an allocated copy */
  g_assert (!gst_caps_is_any (caps));
  gst_caps_unref (caps);  /* gst_pad_get_caps returns a reference */

  caps = gst_pad_get_allowed_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("allowed caps: %s\n", caps_str);
  g_free (caps_str);
  /* get_allowed_caps doesn't mean anything if you aren't connected */
  g_assert (!caps);

  gst_object_unref (pad);

  return 0;
}
Beispiel #3
0
/* "pad-added" handler: route the new pad to the video or audio decoder
 * depending on its media type. */
void on_pad_added(GstElement * element, GstPad * pad)
{
    GstCaps * caps;
    GstStructure * str;
    GstPad * targetsink = NULL;
    const gchar * media_type;

    caps = gst_pad_get_caps(pad);
    g_assert(caps != NULL);
    str = gst_caps_get_structure(caps, 0);
    g_assert(str != NULL);

    media_type = gst_structure_get_name(str);

    /* video streams go to the video decoder, audio streams to the audio
     * decoder; anything else is ignored */
    if(g_strrstr(media_type, "video"))
        targetsink = gst_element_get_pad(videodec, "sink");
    else if(g_strrstr(media_type, "audio"))
        targetsink = gst_element_get_pad(audiodec, "sink");

    if (targetsink != NULL) {
        gst_pad_link(pad, targetsink);
        gst_object_unref(targetsink);
    }
    gst_caps_unref(caps);
}
Beispiel #4
0
/* Detaches a sink node from this node's graph: releases the tee request
 * pads feeding it, removes its elements from the audio/video graphs and
 * notifies the graph via MediaNodeEvent.  Returns true if a sink was
 * actually removed. */
bool MediaNode::disconnectNode(QObject *obj)
{
    MediaNode *sink = qobject_cast<MediaNode*>(obj);
    // qobject_cast returns 0 when obj is not a MediaNode; the original code
    // would then dereference a null pointer below.
    if (!sink)
        return false;

    if (root()) {
        // Disconnecting elements while playing or paused seems to cause
        // potential deadlock. Hence we force the pipeline into ready state
        // before any nodes are disconnected.
        gst_element_set_state(root()->pipeline(), GST_STATE_READY);

        Q_ASSERT(sink->root()); //sink has to have a root since it is connected

        if (sink->description() & (AudioSink)) {
            GstPad *sinkPad = gst_element_get_pad(sink->audioElement(), "sink");
            // Release requested src pad from tee
            GstPad *requestedPad = gst_pad_get_peer(sinkPad);
            if (requestedPad) {
                gst_element_release_request_pad(m_audioTee, requestedPad);
                gst_object_unref(requestedPad);
            }
            if (GST_ELEMENT_PARENT(sink->audioElement()))
                gst_bin_remove(GST_BIN(root()->audioGraph()), sink->audioElement());
            gst_object_unref(sinkPad);
        }

        if (sink->description() & (VideoSink)) {
            GstPad *sinkPad = gst_element_get_pad(sink->videoElement(), "sink");
            // Release requested src pad from tee
            GstPad *requestedPad = gst_pad_get_peer(sinkPad);
            if (requestedPad) {
                gst_element_release_request_pad(m_videoTee, requestedPad);
                gst_object_unref(requestedPad);
            }
            if (GST_ELEMENT_PARENT(sink->videoElement()))
                gst_bin_remove(GST_BIN(root()->videoGraph()), sink->videoElement());
            gst_object_unref(sinkPad);
        }

        sink->breakGraph();
        sink->setRoot(0);
    }

    m_videoSinkList.removeAll(obj);
    m_audioSinkList.removeAll(obj);

    // NOTE(review): unlike the video case below, this branch does not check
    // (m_description & AudioSource) — confirm whether that is intentional.
    if (sink->m_description & AudioSink) {
        // Remove sink from graph
        MediaNodeEvent event(MediaNodeEvent::AudioSinkRemoved, sink);
        mediaNodeEvent(&event);
        return true;
    }

    if ((m_description & VideoSource) && (sink->m_description & VideoSink)) {
        // Remove sink from graph
        MediaNodeEvent event(MediaNodeEvent::VideoSinkRemoved, sink);
        mediaNodeEvent(&event);
        return true;
    }

    return false;
}
Beispiel #5
0
/* Init the video interface. Start GStreamer and create all the elements.
 * flags: VIDEO_INIT_NOAUDIO skips building the audio thread.
 * Returns VIDEO_OK on success, VIDEO_ERROR on failure (note: partially
 * constructed elements are not torn down on the error paths). */
int video_init(int flags)
{
	GstElement *dec, *vqueue, *vconv, *vscale, *aqueue, *aconv, *ascale;

	if (!gst_init_check(NULL, NULL)) return VIDEO_ERROR;

	status = 0;
	status_mutex = g_mutex_new();
	video_width = video_height = 0;
	apeer = NULL;
	audio_disabled = flags & VIDEO_INIT_NOAUDIO;

	/* Main pipeline: filesrc ! decodebin, whose dynamic pads get hooked
	 * up in cb_new_pad once the stream types are known. */
	pipeline = gst_thread_new("pipeline");
	g_signal_connect(pipeline, "eos", G_CALLBACK (cb_eos), NULL);
	g_signal_connect(pipeline, "error", G_CALLBACK (cb_error), NULL);
	g_signal_connect(pipeline, "state-change", G_CALLBACK (cb_state_change), NULL);
	src = gst_element_factory_make("filesrc", "src");
	dec = gst_element_factory_make("decodebin", "dec");
	g_signal_connect (dec, "new-decoded-pad", G_CALLBACK(cb_new_pad), NULL);
	/* Elements must be in a bin before they can be linked, so add them
	 * first and link afterwards (the original order was reversed). */
	gst_bin_add_many(GST_BIN(pipeline), src, dec, NULL);
	if (!gst_element_link_many(src, dec, NULL)) return VIDEO_ERROR;

	/* Video thread: queue ! colorspace ! scale ! framedrop ! xvimagesink */
	vthread = gst_thread_new("vthread");
	vqueue = gst_element_factory_make("queue", "vqueue");
	vqueuesink = gst_element_get_pad(vqueue, "sink");
	vconv = gst_element_factory_make("ffmpegcolorspace", "vconv");
	vscale = gst_element_factory_make("videoscale", "vscale");
	vdrop = gst_element_factory_make("videodrop", "vdrop");
	xvsink = gst_element_factory_make("xvimagesink", "xvsink");
	gst_bin_add_many(GST_BIN(vthread), vqueue, vconv, vscale, vdrop, xvsink, NULL);
	if (!gst_element_link_many(vqueue, vconv, vscale, vdrop, xvsink, NULL))
		return VIDEO_ERROR;
	gst_object_ref(GST_OBJECT(vthread));

	/* Audio thread: queue ! audioconvert ! audioscale ! alsasink */
	if (audio_disabled) return VIDEO_OK;

	athread = gst_thread_new("athread");
	aqueue = gst_element_factory_make("queue", "aqueue");
	aqueuesink = gst_element_get_pad(aqueue, "sink");
	aconv = gst_element_factory_make("audioconvert", "aconv");
	ascale = gst_element_factory_make ("audioscale", "ascale");
	asink = gst_element_factory_make ("alsasink", "asink");
	gst_bin_add_many (GST_BIN (athread), aqueue, aconv, ascale, asink, NULL);
	if (!gst_element_link_many (aqueue, aconv, ascale, asink, NULL))
		return VIDEO_ERROR;
	gst_object_ref(GST_OBJECT(athread));

	return VIDEO_OK;
}
Beispiel #6
0
/* Instance init: creates the internal fdsink child and exposes its sink
 * pad as a ghost pad on this bin. */
static void
rb_mtp_sink_init (RBMTPSink *sink, RBMTPSinkClass *klass)
{
    GstPad *pad;

    /* synchronisation primitives for the upload thread */
    sink->upload_mutex = g_mutex_new ();
    sink->upload_cond = g_cond_new ();

    /* create actual sink */
    sink->fdsink = gst_element_factory_make ("fdsink", NULL);
    if (sink->fdsink == NULL) {
        g_warning ("couldn't create fdsink element");
        return;
    }

    gst_bin_add (GST_BIN (sink), sink->fdsink);
    /* keep our own reference alongside the bin's */
    gst_object_ref (sink->fdsink);

    /* create ghost pad */
    pad = gst_element_get_pad (sink->fdsink, "sink");
    sink->ghostpad = gst_ghost_pad_new ("sink", pad);
    gst_element_add_pad (GST_ELEMENT (sink), sink->ghostpad);
    /* extra reference so the ghostpad pointer stays valid for later use */
    gst_object_ref (sink->ghostpad);
    gst_object_unref (pad);

}
Beispiel #7
0
/* Builds SOURCE ! wavparse ! ASINK; the parser's source pad is linked
 * dynamically once it appears. */
static GstElement *
make_wav_pipeline (const gchar * location)
{
  GstElement *pipeline;
  GstElement *source, *parser, *audio_out;

  pipeline = gst_pipeline_new ("app");

  source = gst_element_factory_make_or_warn (SOURCE, "src");
  parser = gst_element_factory_make_or_warn ("wavparse", "decoder");
  audio_out = gst_element_factory_make_or_warn (ASINK, "sink");

  g_object_set (G_OBJECT (source), "location", location, NULL);

  gst_bin_add_many (GST_BIN (pipeline), source, parser, audio_out, NULL);

  gst_element_link (source, parser);

  /* wavparse only creates its source pad once the stream header has been
   * parsed, so defer that link. */
  setup_dynamic_link (parser, "src", gst_element_get_pad (audio_out, "sink"),
      NULL);

  return pipeline;
}
Beispiel #8
0
//
// connect decodebin's dynamically created source pads to colourconverter
// (only video pads are linked; the sink is linked at most once)
//
static void icvNewPad(GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
	GstElement *sink = GST_ELEMENT(data);
	GstStructure *str;
	GstPad *sinkpad;
	GstCaps *caps;

	/* link only once */
	sinkpad = gst_element_get_pad(sink, "sink");

	if(GST_PAD_IS_LINKED(sinkpad)) {
		g_print("sink is already linked\n");
		/* use gst_object_unref like every other exit path (the original
		 * used g_object_unref here, which works but was inconsistent) */
		gst_object_unref(sinkpad);
		return;
	}

	/* check media type */
	caps = gst_pad_get_caps(pad);
	str = gst_caps_get_structure(caps, 0);
	const char *structname = gst_structure_get_name(str);
//	g_print("new pad %s\n", structname);
	if(!g_strrstr(structname, "video")) {
		gst_caps_unref(caps);
		gst_object_unref(sinkpad);
		return;
	}
	printf("linking pad %s\n", structname);

	/* link'n'play */
	gst_pad_link (pad, sinkpad);

	gst_caps_unref(caps);
	gst_object_unref(sinkpad);
}
/* Attaches the scope parser to a tee: requests the "src1" pad from the tee,
 * links it to a new fakesink, and installs buffer/event probes on the tee
 * pad.  Returns TRUE on success, FALSE otherwise. */
gboolean
scope_parser_attach_to_tee(ScopeParser *parser, GstElement *tee)
{
    GstPad *sink_pad;
    
    if(parser == NULL) {
        return FALSE;
    }
    
    parser->tee_pad = gst_element_get_request_pad(tee, "src1");
    if(parser->tee_pad == NULL) {
        g_warning("Could not get a source pad from the tee");
        return FALSE;
    }
    
    parser->fakesink = gst_element_factory_make("fakesink", "fakesink");
    
    if(parser->fakesink == NULL) {
        g_warning("Could not create fakesink element");
        return FALSE;
    }
    
    /* NOTE(review): the gst_pad_link result is not checked */
    sink_pad = gst_element_get_pad(parser->fakesink, "sink");
    gst_pad_link(parser->tee_pad, sink_pad);
    
    parser->buffer_probe_id = gst_pad_add_buffer_probe(parser->tee_pad, 
        G_CALLBACK(scope_parser_buffer_probe), parser);
    parser->event_probe_id = gst_pad_add_event_probe(parser->tee_pad, 
        G_CALLBACK(scope_parser_event_probe), parser);
        
    /* NOTE(review): tee_pad is unreffed here while the pointer (and probe
     * ids) stay stored in parser — the pad remains valid only as long as
     * the tee keeps the request pad; confirm the detach path releases it
     * via gst_element_release_request_pad. */
    gst_object_unref(parser->tee_pad);
    gst_object_unref(sink_pad);
    
    return TRUE;
}
/* decodebin "new-decoded-pad" callback: links the first audio stream that
 * appears to the detector's audioconvert sink pad. */
static void
bbd_new_decoded_pad(GstElement *decodebin, GstPad *pad, 
    gboolean last, gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;
    BansheeBpmDetector *detector = (BansheeBpmDetector *)data;

    g_return_if_fail(detector != NULL);

    audiopad = gst_element_get_pad(detector->audioconvert, "sink");

    /* only link the first stream */
    if(GST_PAD_IS_LINKED(audiopad)) {
        gst_object_unref(audiopad);
        return;
    }

    /* only audio pads are of interest */
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);

    if(!g_strrstr(gst_structure_get_name(str), "audio")) {
        gst_caps_unref(caps);
        gst_object_unref(audiopad);
        return;
    }

    gst_caps_unref(caps);
    gst_pad_link(pad, audiopad);
    /* gst_element_get_pad returned a reference; drop it now that the link
     * is made (the original leaked it on this path) */
    gst_object_unref(audiopad);
}
Beispiel #11
0
/* Builds a thread bin containing two linked test elements and exposes
 * element2's sink pad as a ghost pad ("sink1") on the thread. */
static GstElement *
create_thread_ghostpads (void)
{
  GstElement *thread;
  GstElement *element1, *element2;

  thread = gst_thread_new ("testthread");
  element1 = gst_element_new ();
  gst_element_set_name (element1, "test1");
  gst_element_add_pad (element1,
      gst_pad_new_from_template (gst_static_pad_template_get (&srctemplate),
          "src1"));
  gst_bin_add (GST_BIN (thread), element1);
  element2 = gst_element_new ();
  gst_element_set_name (element2, "test2");
  /* BUG FIX: the sink pad must be added to element2 — the original added
   * it to element1, so element2 had no "sink1" pad to link or ghost. */
  gst_element_add_pad (element2,
      gst_pad_new_from_template (gst_static_pad_template_get (&sinktemplate),
          "sink1"));
  gst_bin_add (GST_BIN (thread), element2);
  gst_element_link (element1, "src1", element2, "sink1");
  gst_element_add_ghost_pad (thread, gst_element_get_pad (element2, "sink1"),
      "sink1");

  return thread;
}
Beispiel #12
0
/* Tears down the transcoding pipeline: removes the buffer probe from the
 * sink pad, drops the pipeline to NULL state, releases it and forgets all
 * element pointers held by the private structure. */
static void
brasero_transcode_stop_pipeline (BraseroTranscode *transcode)
{
	BraseroTranscodePrivate *priv;
	GstPad *sinkpad;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);
	if (!priv->pipeline)
		return;

	/* remove the probe before shutting down so it cannot fire mid-teardown */
	sinkpad = gst_element_get_pad (priv->sink, "sink");
	if (priv->probe)
		gst_pad_remove_buffer_probe (sinkpad, priv->probe);

	gst_object_unref (sinkpad);

	gst_element_set_state (priv->pipeline, GST_STATE_NULL);
	gst_object_unref (GST_OBJECT (priv->pipeline));

	/* the pipeline owned these elements; just clear the pointers */
	priv->link = NULL;
	priv->sink = NULL;
	priv->source = NULL;
	priv->convert = NULL;
	priv->pipeline = NULL;

	priv->set_active_state = 0;
}
Beispiel #13
0
/* Test element state-change handler: on READY->PAUSED, exposes the internal
 * fakesrc's "src" pad as an active ghost pad on this element.
 * (Test-only code — uses the check framework's fail_if.) */
static GstStateChangeReturn
gst_parse_test_element_change_state (GstElement * element,
    GstStateChange transition)
{
  GstParseTestElement *src = (GstParseTestElement *) element;

  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
    /* Add our pad */
    GstPad *pad;
    GstPad *ghost;

    if (src->fakesrc == NULL)
      return GST_STATE_CHANGE_FAILURE;

    pad = gst_element_get_pad (src->fakesrc, "src");
    if (pad == NULL)
      return GST_STATE_CHANGE_FAILURE;

    ghost = gst_ghost_pad_new ("src", pad);
    fail_if (ghost == NULL, "Failed to create ghost pad");
    /* activate and add */
    gst_pad_set_active (ghost, TRUE);
    gst_element_add_pad (GST_ELEMENT (src), ghost);
    gst_object_unref (pad); /* the ghost pad holds its own reference */
  }

  /* chain up to the parent class implementation */
  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
Beispiel #14
0
/* decodebin "new-decoded-pad" callback: links the first audio stream that
 * appears to the transcoder's sink bin. */
static void
gst_transcoder_new_decoded_pad(GstElement *decodebin, GstPad *pad, 
    gboolean last, gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *audiopad;
    GstTranscoder *transcoder = (GstTranscoder *)data;

    g_return_if_fail(transcoder != NULL);

    audiopad = gst_element_get_pad(transcoder->sink_bin, "sink");

    /* only link the first stream */
    if(GST_PAD_IS_LINKED(audiopad)) {
        gst_object_unref(audiopad);
        return;
    }

    /* only audio pads are of interest */
    caps = gst_pad_get_caps(pad);
    str = gst_caps_get_structure(caps, 0);

    if(!g_strrstr(gst_structure_get_name(str), "audio")) {
        gst_caps_unref(caps);
        gst_object_unref(audiopad);
        return;
    }

    gst_caps_unref(caps);
    gst_pad_link(pad, audiopad);
    /* gst_element_get_pad returned a reference; drop it now that the link
     * is made (the original leaked it on this path) */
    gst_object_unref(audiopad);
}
/* Demuxer pad-added callback: links new video pads to the decoder's sink
 * pad; non-video pads are ignored. */
static void
new_decoded_pad_cb(GstElement *demuxer,
                   GstPad *new_pad,
                   gpointer user_data)
{
   GstElement *decoder;
   GstPad *pad;
   GstCaps *caps;
   gchar *str;

   caps = gst_pad_get_caps(new_pad);
   str = gst_caps_to_string(caps);

   if (g_str_has_prefix(str, "video/"))
     {
        decoder = GST_ELEMENT(user_data);

        pad = gst_element_get_pad(decoder, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(new_pad, pad)))
          {
             g_warning("Failed to link %s:%s to %s:%s", GST_DEBUG_PAD_NAME(new_pad),
                       GST_DEBUG_PAD_NAME(pad));
          }
        /* release the reference returned by gst_element_get_pad
         * (the original leaked it) */
        gst_object_unref(pad);
     }
   g_free(str);
   gst_caps_unref(caps);
}
// decodebin "new-decoded-pad" handler: attach the first audio stream to
// the fingerprinter's sink pad; ignore everything else.
static void
Lastfmfp_cb_newpad(GstElement *decodebin, GstPad *pad, gboolean last, LastfmfpAudio *ma)
{
    GstPad *sinkpad;
    GstCaps *padcaps;
    GstStructure *padstruct;
    gboolean is_audio;

    // a stream is already attached — nothing to do
    sinkpad = gst_element_get_pad(ma->audio, "sink");
    if (GST_PAD_IS_LINKED(sinkpad)) {
        g_object_unref(sinkpad);
        return;
    }

    // inspect the new pad's media type
    padcaps = gst_pad_get_caps(pad);
    padstruct = gst_caps_get_structure(padcaps, 0);
    is_audio = g_strrstr(gst_structure_get_name(padstruct), "audio") != NULL;
    gst_caps_unref(padcaps);

    if (!is_audio) {
        gst_object_unref(sinkpad);
        return;
    }

    // attach the audio stream
    gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
}
Beispiel #17
0
/* Minimal test: fakesrc ! fakesink, with a handler connected to the
 * source pad's "fixate" signal, then sets the pipeline playing. */
int
main (int argc, char *argv[])
{
  GstElement *a;
  GstElement *b;
  GstElement *pipeline;
  GstPad *pad;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new (NULL);

  a = gst_element_factory_make ("fakesrc", NULL);
  g_assert (a);
  b = gst_element_factory_make ("fakesink", NULL);
  g_assert (b);

  gst_bin_add_many (GST_BIN (pipeline), a, b, NULL);
  gst_element_link (a, b);

  /* the element keeps its own reference to the pad, so ours can be
   * dropped right after connecting (the original leaked it) */
  pad = gst_element_get_pad (a, "src");
  g_signal_connect (G_OBJECT (pad), "fixate", G_CALLBACK (handler),
      (void *) 0xdeadbeef);
  gst_object_unref (pad);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);


  return 0;
}
Beispiel #18
0
/* decodebin "new-decoded-pad" callback: links the first audio stream to
 * the global audio bin's sink pad. */
static void
cb_newpad (GstElement *decodebin,
	   GstPad     *pad,
	   gboolean    last,
	   gpointer    data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *audiopad;

  /* only link once */
  audiopad = gst_element_get_pad (audio, "sink");
  if (GST_PAD_IS_LINKED (audiopad)) {
    g_object_unref (audiopad);
    return;
  }

  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  if (!g_strrstr (gst_structure_get_name (str), "audio")) {
    gst_caps_unref (caps);
    gst_object_unref (audiopad);
    return;
  }
  gst_caps_unref (caps);

  /* link'n'play */
  gst_pad_link (pad, audiopad);
  /* drop the reference from gst_element_get_pad (the original leaked it
   * on this success path) */
  gst_object_unref (audiopad);
}
Beispiel #19
0
/* Picks the best available video sink, installs it as this bin's child
 * ("kid") and retargets the existing ghost pad at its sink pad.
 * Returns TRUE on success, FALSE (with an element error posted) if no
 * usable sink was found. */
static gboolean
gst_auto_video_sink_detect (GstAutoVideoSink * sink)
{
  GstElement *esink;
  GstPad *targetpad;

  /* drop any previously selected child first */
  gst_auto_video_sink_clear_kid (sink);

  /* find element */
  GST_DEBUG_OBJECT (sink, "Creating new kid");
  if (!(esink = gst_auto_video_sink_find_best (sink)))
    goto no_sink;

  sink->kid = esink;
  gst_bin_add (GST_BIN (sink), esink);

  /* attach ghost pad */
  GST_DEBUG_OBJECT (sink, "Re-assigning ghostpad");
  targetpad = gst_element_get_pad (sink->kid, "sink");
  gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
  gst_object_unref (targetpad);
  GST_DEBUG_OBJECT (sink, "done changing auto video sink");

  return TRUE;

  /* ERRORS */
no_sink:
  {
    GST_ELEMENT_ERROR (sink, LIBRARY, INIT, (NULL),
        ("Failed to find a supported video sink"));
    return FALSE;
  }
}
Beispiel #20
0
/* decodebin "new-decoded-pad" callback: links the first video stream to
 * glimagesink's sink pad. */
void Pipeline::cb_new_pad (GstElement* decodebin, GstPad* pad, gboolean last, GstElement* glimagesink)
{
    GstPad* glpad = gst_element_get_pad (glimagesink, "sink");

    //only link once
    if (GST_PAD_IS_LINKED (glpad))
    {
        gst_object_unref (glpad);
        return;
    }

    GstCaps* caps = gst_pad_get_caps (pad);
    GstStructure* str = gst_caps_get_structure (caps, 0);
    if (!g_strrstr (gst_structure_get_name (str), "video"))
    {
        gst_caps_unref (caps);
        gst_object_unref (glpad);
        return;
    }
    gst_caps_unref (caps);

    GstPadLinkReturn ret = gst_pad_link (pad, glpad);
    if (ret != GST_PAD_LINK_OK)
        g_warning ("Failed to link with decodebin!\n");
    // release the pad reference on this path too (the original leaked it)
    gst_object_unref (glpad);
}
Beispiel #21
0
/* Generic pad event handler: logs every event and, on EOS, pushes an EOS
 * notification message out of the component's "events" pad before letting
 * the default handler forward the event. */
static gboolean pad_event_handler(GstPad *pad, GstEvent *event) {
	// Establish thread-local.
	MpfComponent *component = MPF_COMPONENT(GST_OBJECT_PARENT(pad));
	mpf_component_set_curcomponent(component);
	GstElement *element = gst_pad_get_parent_element(pad);

	gchar *elementname = gst_element_get_name(element);
	gchar *padname = gst_pad_get_name(pad);
	const gchar *eventname = gst_event_type_get_name(event->type);

	MPF_PRIVATE_ALWAYS("element=%s pad=%s event=%s\n", elementname, padname,
			eventname);
	// If EOS, poke a message out of the events pad.
	if (event->type == GST_EVENT_EOS) {
		GstPad *events = gst_element_get_pad(element, "events");

		printf("GstPad *events=%p\n", events);

		GString *string = g_string_new("");
		g_string_printf(string, "%s: EOS buffer_count=%d\n", elementname,
				mpf_private.buffer_count);
		// NOTE(review): mpf_voidstar_stralloc presumably copies the string;
		// if so, `string` itself is leaked here — TODO confirm and free it.
		mpf_voidstar_push("events", mpf_voidstar_stralloc(string->str));
		mpf_voidstar_send_outbuffers();
		gst_pad_push_event(events, gst_event_new_eos());
		// drop the pad reference from gst_element_get_pad (was leaked)
		gst_object_unref(events);
	}

	g_free(elementname);
	g_free(padname);
	// gst_pad_get_parent_element returns a reference we own (was leaked)
	gst_object_unref(element);
	return gst_pad_event_default(pad, event);
}
Beispiel #22
0
/* Probes a webcam device: builds "<src> name=source device=<dev> ! fakesink",
 * starts it, and on success reads the source pad's caps to collect the
 * supported video formats for the device. */
static void get_device_data (ofGstDevice &webcam_device, int desired_framerate)
{
    string pipeline_desc = webcam_device.gstreamer_src + " name=source device=" +
            webcam_device.video_device + " ! fakesink";

    GError * err = NULL;
    GstElement * pipeline = gst_parse_launch (pipeline_desc.c_str(), &err);
    if ((pipeline == NULL) || (err != NULL)){
    	if (err){
    		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data: %s", err->message);
    		g_error_free (err);
    	}else{
    		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data, cannot get pipeline");
    	}
    	if(pipeline)
    		gst_object_unref (pipeline);
    	return;
    }

	// TODO: try to lower seconds,
    // Start the pipeline and wait for max. 10 seconds for it to start up
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	GstStateChangeReturn ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

	// Check if any error messages were posted on the bus
	GstBus * bus = gst_element_get_bus (pipeline);
	GstMessage * msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
	gst_object_unref (bus);

	if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS)){
		gst_element_set_state (pipeline, GST_STATE_PAUSED);

		GstElement *src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
		char       *name;
		g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);

		ofLog(OF_LOG_VERBOSE, "Device: %s (%s)\n", name==NULL?"":name, webcam_device.video_device.c_str());
		g_free (name);          // g_object_get returns a newly-allocated copy of string properties

		GstPad     *pad  = gst_element_get_pad (src, "src");
		GstCaps    *caps = gst_pad_get_caps (pad);
		gst_object_unref (pad);

		get_supported_video_formats (webcam_device, *caps, desired_framerate);

		gst_caps_unref (caps);
		gst_object_unref (src); // gst_bin_get_by_name returns a new reference
	}else if(msg){
		gchar *debug;
		gst_message_parse_error(msg, &err, &debug);

		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data; module %s reported: %s",
			  gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

		g_error_free(err);
		g_free(debug);
		gst_message_unref(msg); // gst_bus_poll returned a reference we own
	}
	gst_element_set_state (pipeline, GST_STATE_NULL);
	gst_object_unref (pipeline);

}
Beispiel #23
0
/* Probes a webcam device: builds "<src> name=source device=<dev> ! fakesink",
 * starts it, and on success reads the source pad's caps to collect the
 * device's supported video formats. */
static void
get_device_data (ofGstDevice &webcam_device)
{
    char                *pipeline_desc;
    GstElement          *pipeline;
    GError              *err;
    GstStateChangeReturn ret;
    GstMessage          *msg;
    GstBus              *bus;

    {
        pipeline_desc = g_strdup_printf ("%s name=source device=%s ! fakesink",
                                         webcam_device.gstreamer_src,
                                         webcam_device.video_device);
        err      = NULL;
        pipeline = gst_parse_launch (pipeline_desc, &err);
        if ((pipeline != NULL) && (err == NULL))
        {
            /* Start the pipeline and wait for max. 10 seconds for it to start up */
            gst_element_set_state (pipeline, GST_STATE_PLAYING);
            ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

            /* Check if any error messages were posted on the bus */
            bus = gst_element_get_bus (pipeline);
            msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
            gst_object_unref (bus);

            if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS))
            {
                GstElement *src;
                GstPad     *pad;
                char       *name;
                GstCaps    *caps;

                gst_element_set_state (pipeline, GST_STATE_PAUSED);

                src = gst_bin_get_by_name (GST_BIN (pipeline), "source");

                g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);
                if (name == NULL)
                    name = "Unknown";
                /* NOTE(review): g_object_get duplicates string properties, so
                 * the fetched name is leaked here; it cannot simply be freed
                 * because of the "Unknown" literal fallback — TODO rework. */

// ofLog(OF_LOG_VERBOSE,"Device: %s (%s)\n", name, webcam_device.video_device);
                pad  = gst_element_get_pad (src, "src");
                caps = gst_pad_get_caps (pad);
                gst_object_unref (pad);
                get_supported_video_formats (webcam_device, *caps);
                gst_caps_unref (caps);
            }
            gst_element_set_state (pipeline, GST_STATE_NULL);
            gst_object_unref (pipeline);
        }
        if (err)
            g_error_free (err);

        g_free (pipeline_desc);
    }
}
Beispiel #24
0
/***
 * Creates an instance of a playbin with "audio-src" and
 * "video-src" ghost pads to allow redirected output streams.
 *
 * ### This function is probably not required now that MediaObject is based
 *     on decodebin directly.
 */
GstElement* GstHelper::createPluggablePlaybin()
{
    GstElement *playbin = 0;
    //init playbin and add to our pipeline
    playbin = gst_element_factory_make("playbin2", NULL);

    //Create an identity element to redirect sound
    GstElement *audioSinkBin =  gst_bin_new (NULL);
    GstElement *audioPipe = gst_element_factory_make("identity", NULL);
    gst_bin_add(GST_BIN(audioSinkBin), audioPipe);

    //Create a sinkpad on the identity
    GstPad *audiopad = gst_element_get_pad (audioPipe, "sink");
    gst_element_add_pad (audioSinkBin, gst_ghost_pad_new ("sink", audiopad));
    gst_object_unref (audiopad);

    //Create an "audio_src" source pad on the playbin
    GstPad *audioPlaypad = gst_element_get_pad (audioPipe, "src");
    gst_element_add_pad (playbin, gst_ghost_pad_new ("audio_src", audioPlaypad));
    gst_object_unref (audioPlaypad);

    //Done with our audio redirection
    g_object_set (G_OBJECT(playbin), "audio-sink", audioSinkBin, (const char*)NULL);

    // * * Redirect video to "video_src" pad : * *

    //Create an identity element to redirect video
    GstElement *videoSinkBin =  gst_bin_new (NULL);
    GstElement *videoPipe = gst_element_factory_make("identity", NULL);
    gst_bin_add(GST_BIN(videoSinkBin), videoPipe);

    //Create a sinkpad on the identity
    GstPad *videopad = gst_element_get_pad (videoPipe, "sink");
    gst_element_add_pad (videoSinkBin, gst_ghost_pad_new ("sink", videopad));
    gst_object_unref (videopad);

    //Create a "video_src" source pad on the playbin
    GstPad *videoPlaypad = gst_element_get_pad (videoPipe, "src");
    gst_element_add_pad (playbin, gst_ghost_pad_new ("video_src", videoPlaypad));
    gst_object_unref (videoPlaypad);

    //Done with our video redirection
    g_object_set (G_OBJECT(playbin), "video-sink", videoSinkBin, (const char*)NULL);
    return playbin;
}
/*
 * Method: get_pad(name)
 * name: the name of a pad.
 *
 * Retrieves a Gst::Pad object from the element by name.
 *
 * Returns: a Gst::Pad object, or nil if the pad cannot be found.
 */
static VALUE
rg_get_pad(VALUE self, VALUE pad_name)
{
    GstPad *pad;

    /* Look the pad up by name on the wrapped element. */
    pad = gst_element_get_pad(SELF(self), RVAL2CSTR(pad_name));
    if (pad == NULL)
        return Qnil;

    return RGST_PAD_NEW(pad);
}
Beispiel #26
0
/* Composition "pad-added" callback: hook the new source pad up to the
 * sink and count the addition. */
static void
on_composition_pad_added_cb (GstElement * composition, GstPad * pad,
    GstElement * sink)
{
  GstPad *sinkpad;

  sinkpad = gst_element_get_pad (sink, "sink");
  gst_pad_link (pad, sinkpad);
  composition_pad_added++;
  gst_object_unref (sinkpad);
}
Beispiel #27
0
/* Component instance initialization and parameter handling. */
component_setup() {
	/* Configure logging from the "loglevel" parameter. */
	mpf_private.loglevel = mpf_param_get_int("loglevel");
	mpf_logger_init(&mpf_private.logger, mpf_private.loglevel,
			gst_element_get_name(GST_ELEMENT(component)));
	/* Route events arriving on the "input" pad through pad_event_handler. */
	GstPad *input = gst_element_get_pad(GST_ELEMENT(
			mpf_component_get_curcomponent()), "input");
	gst_pad_set_event_function(input, pad_event_handler);
	/* Optional debug flag from the "mpf-debug" parameter. */
	if (mpf_param_get_int("mpf-debug"))
		mpf_component_get_curcomponent()->flags = MPF_DEBUG;
}
Beispiel #28
0
/* Plays an audio file: filesrc ! decodebin, with the decoded audio stream
 * linked (in cb_newpad) to an "audiobin" of audioconvert ! alsasink whose
 * sink pad is exposed as a ghost pad.
 * Note: `pipeline` and `audio` are file-level globals shared with the
 * decodebin callback. */
gint
main (gint   argc,
      gchar *argv[])
{
  GMainLoop *loop;
  GstElement *src, *dec, *conv, *sink;
  GstPad *audiopad;
  GstBus *bus;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* make sure we have input */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* setup */
  pipeline = gst_pipeline_new ("pipeline");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);

  src = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (src), "location", argv[1], NULL);
  dec = gst_element_factory_make ("decodebin", "decoder");
  g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
  gst_element_link (src, dec);

  /* create audio output: expose audioconvert's sink pad as the bin's
   * ghost "sink" pad so cb_newpad can link to the bin directly */
  audio = gst_bin_new ("audiobin");
  conv = gst_element_factory_make ("audioconvert", "aconv");
  audiopad = gst_element_get_pad (conv, "sink");
  sink = gst_element_factory_make ("alsasink", "sink");
  gst_bin_add_many (GST_BIN (audio), conv, sink, NULL);
  gst_element_link (conv, sink);
  gst_element_add_pad (audio,
      gst_ghost_pad_new ("sink", audiopad));
  gst_object_unref (audiopad);
  gst_bin_add (GST_BIN (pipeline), audio);

  /* run */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* cleanup */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
Beispiel #29
0
/* decodebin pad-added callback: links the new pad to the converter bin's
 * sink pad, then queues StartPipeline onto the object's own thread. */
void StreamPipeline::NewPadCallback(
    GstElement* decodebin, GstPad* pad, gpointer self) {
  qLog(Debug) << "Linking pads";
  StreamPipeline* me = reinterpret_cast<StreamPipeline*>(self);
  GstPad* const audiopad = gst_element_get_pad(me->convert_bin_, "sink");

  gst_pad_link(pad, audiopad);
  gst_object_unref(audiopad);

  // Must run on me's thread, not this GStreamer callback thread.
  QMetaObject::invokeMethod(me, "StartPipeline", Qt::QueuedConnection);
}
Beispiel #30
0
/* "pad-added" handler: link the newly created source pad to the global
 * decoder's sink pad. */
static void
new_pad(GstElement *element, GstPad *pad, gpointer data) {
    GstPad *decoder_sink;

    /* We can now link this pad with the audio decoder */
    g_print("Dynamic pad created, linking parser/decoder\n");

    decoder_sink = gst_element_get_pad(decoder, "sink");
    gst_pad_link(pad, decoder_sink);
    gst_object_unref(decoder_sink);
}