Code example #1
File: testegl.c Project: 01org/gst-omx
static gboolean
init_playbin_player (APP_STATE_T * state, const gchar * uri)
{
  GstElement *vsink;

  vsink = gst_element_factory_make ("fakesink", "vsink");
  g_object_set (vsink, "sync", TRUE, "silent", TRUE, "qos", TRUE,
      "enable-last-sample", FALSE,
      "max-lateness", 20 * GST_MSECOND, "signal-handoffs", TRUE, NULL);

  g_signal_connect (vsink, "preroll-handoff", G_CALLBACK (preroll_cb), state);
  g_signal_connect (vsink, "handoff", G_CALLBACK (buffers_cb), state);

  gst_pad_add_probe (gst_element_get_static_pad (vsink, "sink"),
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, events_cb, state, NULL);
  gst_pad_add_probe (gst_element_get_static_pad (vsink, "sink"),
      GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, query_cb, state, NULL);

  /* Instantiate and configure playbin */
  state->pipeline = gst_element_factory_make ("playbin", "player");
  g_object_set (state->pipeline, "uri", uri,
      "video-sink", vsink, "flags",
      GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_AUDIO, NULL);

  state->vsink = gst_object_ref (vsink);
  return TRUE;
}
Code example #2
File: testegl.c Project: ryumiel/gst-omx
static gboolean
init_parse_launch_player (APP_STATE_T * state, const gchar * spipeline)
{
    GstElement *vsink;
    GError *error = NULL;

    /* ex:

       ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
       h264parse !  omxh264dec ! glcolorscale ! fakesink name=vsink"

       ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
       h264parse ! omxh264dec ! glcolorscale ! \
       video/x-raw(memory:EGLImage) ! fakesink name=vsink"

       ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
       h264parse ! omxh264dec ! glcolorscale ! \
       video/x-raw(memory:GLMemory) ! fakesink name=vsink"

       ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
       h264parse ! omxh264dec ! glcolorscale ! \
       video/x-raw(meta:GstVideoGLTextureUploadMeta) ! \
       fakesink name=vsink"

     */

    /* pipelines 1 and 2 are equivalent and the most efficient, as glcolorscale
     * will enter passthrough mode and testegl will just bind the eglimage
     * to a GL texture without any copy. */

    state->pipeline = gst_parse_launch (spipeline, &error);

    if (!state->pipeline) {
        g_printerr ("Unable to instantiate pipeline '%s': %s\n",
                    spipeline, error->message);
        return FALSE;
    }

    vsink = gst_bin_get_by_name (GST_BIN (state->pipeline), "vsink");

    if (!vsink) {
        g_printerr ("Unable to find a fakesink named 'vsink'");
        return FALSE;
    }

    g_object_set (vsink, "sync", TRUE, "silent", TRUE, "qos", TRUE,
                  "enable-last-sample", FALSE,
                  "max-lateness", 20 * GST_MSECOND, "signal-handoffs", TRUE, NULL);

    g_signal_connect (vsink, "preroll-handoff", G_CALLBACK (preroll_cb), state);
    g_signal_connect (vsink, "handoff", G_CALLBACK (buffers_cb), state);

    gst_pad_add_probe (gst_element_get_static_pad (vsink, "sink"),
                       GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, events_cb, state, NULL);
    gst_pad_add_probe (gst_element_get_static_pad (vsink, "sink"),
                       GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, query_cb, state, NULL);

    state->vsink = gst_object_ref (vsink);
    return TRUE;
}
Code example #3
static void
gst_hls_sink_init (GstHlsSink * sink)
{
  GstPadTemplate *templ = gst_static_pad_template_get (&sink_template);
  sink->ghostpad = gst_ghost_pad_new_no_target_from_template ("sink", templ);
  gst_object_unref (templ);
  gst_element_add_pad (GST_ELEMENT_CAST (sink), sink->ghostpad);
  gst_pad_add_probe (sink->ghostpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      gst_hls_sink_ghost_event_probe, sink, NULL);
  gst_pad_add_probe (sink->ghostpad, GST_PAD_PROBE_TYPE_BUFFER,
      gst_hls_sink_ghost_buffer_probe, sink, NULL);
  gst_pad_set_chain_list_function (sink->ghostpad, gst_hls_sink_chain_list);

  sink->location = g_strdup (DEFAULT_LOCATION);
  sink->playlist_location = g_strdup (DEFAULT_PLAYLIST_LOCATION);
  sink->playlist_root = g_strdup (DEFAULT_PLAYLIST_ROOT);
  sink->playlist_length = DEFAULT_PLAYLIST_LENGTH;
  sink->max_files = DEFAULT_MAX_FILES;
  sink->target_duration = DEFAULT_TARGET_DURATION;

  /* haven't added a sink yet, make sure it is detected as a sink meanwhile */
  GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_SINK);

  gst_hls_sink_reset (sink);
}
Code example #4
File: GStreamerReader.cpp Project: msliu/gecko-dev
void GStreamerReader::InstallPadCallbacks()
{
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");

  gst_pad_add_probe(sinkpad,
      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
      &GStreamerReader::EventProbeCb, this, nullptr);
  gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
      GStreamerReader::QueryProbeCb, nullptr, nullptr);

  gst_pad_set_element_private(sinkpad, this);
  gst_object_unref(sinkpad);

  sinkpad = gst_element_get_static_pad(GST_ELEMENT(mAudioAppSink), "sink");
  gst_pad_add_probe(sinkpad,
      (GstPadProbeType) (GST_PAD_PROBE_TYPE_SCHEDULING |
        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_UPSTREAM |
        GST_PAD_PROBE_TYPE_EVENT_FLUSH),
      &GStreamerReader::EventProbeCb, this, nullptr);
  gst_object_unref(sinkpad);
}
Code example #5
File: kmsutils.c Project: 2bees-rd/kms-core
void
kms_utils_manage_gaps (GstPad * pad)
{
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, discont_detection_probe,
      NULL, NULL);
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      gap_detection_probe, NULL, NULL);
}
Code example #6
static void
on_demuxElementAdded (GstBin * demux, GstElement * element, gpointer user_data)
{
  GstAdaptiveDemuxTestEnginePrivate *priv =
      (GstAdaptiveDemuxTestEnginePrivate *) user_data;
  GstAdaptiveDemuxTestOutputStream *stream = NULL;
  GstPad *internal_pad;
  gchar *srcbin_name;
  gint i;

  srcbin_name = GST_ELEMENT_NAME (element);
  GST_TEST_LOCK (priv);
  for (i = 0; i < priv->engine.output_streams->len; i++) {
    stream = g_ptr_array_index (priv->engine.output_streams, i);
    if (strstr (srcbin_name, GST_PAD_NAME (stream->pad)) != NULL)
      break;
  }
  fail_unless (stream != NULL);

  /* keep the reference to the internal_pad.
   * We will need it to identify the stream in the on_demuxReceivesEvent callback
   */
  if (stream->internal_pad) {
    gst_pad_remove_probe (stream->internal_pad, stream->internal_pad_probe);
    gst_object_unref (stream->internal_pad);
  }
  internal_pad = gst_element_get_static_pad (element, "src");
  stream->internal_pad_probe =
      gst_pad_add_probe (internal_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      (GstPadProbeCallback) on_demuxReceivesEvent, priv, NULL);
  stream->internal_pad = internal_pad;
  GST_TEST_UNLOCK (priv);

}
Code example #7
void AudioSourceProviderGStreamer::handleNewDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads++;

    if (m_deinterleaveSourcePads > 2) {
        g_warning("The AudioSourceProvider supports only mono and stereo audio. Silencing out this new channel.");
        GstElement* queue = gst_element_factory_make("queue", 0);
        GstElement* sink = gst_element_factory_make("fakesink", 0);
        g_object_set(sink, "async", FALSE, nullptr);
        gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

        GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
        gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

        GQuark quark = g_quark_from_static_string("peer");
        g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());
        gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);
        gst_element_sync_state_with_parent(queue);
        gst_element_sync_state_with_parent(sink);
        return;
    }

    // A new pad for a planar channel was added in deinterleave. Plug
    // in an appsink so we can pull the data from each
    // channel. Pipeline looks like:
    // ... deinterleave ! queue ! appsink.
    GstElement* queue = gst_element_factory_make("queue", 0);
    GstElement* sink = gst_element_factory_make("appsink", 0);

    GstAppSinkCallbacks callbacks;
    callbacks.eos = 0;
    callbacks.new_preroll = 0;
    callbacks.new_sample = onAppsinkNewBufferCallback;
    gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, 0);

    g_object_set(sink, "async", FALSE, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(gSampleBitRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr));

    gst_app_sink_set_caps(GST_APP_SINK(sink), caps.get());

    gst_bin_add_many(GST_BIN(m_audioSinkBin.get()), queue, sink, nullptr);

    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
    gst_pad_link_full(pad, sinkPad.get(), GST_PAD_LINK_CHECK_NOTHING);

    GQuark quark = g_quark_from_static_string("peer");
    g_object_set_qdata(G_OBJECT(pad), quark, sinkPad.get());

    gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    sinkPad = adoptGRef(gst_element_get_static_pad(sink, "sink"));
    gst_pad_add_probe(sinkPad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, onAppsinkFlushCallback, this, nullptr);

    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(sink);
}
Code example #8
File: kmsagnosticbin.c Project: shelsonjava/kms-core
static GstPad *
kms_agnostic_bin2_request_new_pad (GstElement * element,
    GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
{
  GstPad *pad;
  gchar *pad_name;
  KmsAgnosticBin2 *self = KMS_AGNOSTIC_BIN2 (element);

  GST_OBJECT_LOCK (self);
  pad_name = g_strdup_printf ("src_%d", self->priv->pad_count++);
  GST_OBJECT_UNLOCK (self);

  pad = gst_ghost_pad_new_no_target_from_template (pad_name, templ);
  g_free (pad_name);

  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
      kms_agnostic_bin2_src_reconfigure_probe, element, NULL);

  g_signal_connect (pad, "unlinked",
      G_CALLBACK (kms_agnostic_bin2_src_unlinked), self);

  gst_pad_set_active (pad, TRUE);

  if (gst_element_add_pad (element, pad)) {
    return pad;
  }

  g_object_unref (pad);

  return NULL;
}
Code example #9
static GstElement *
kms_recorder_endpoint_create_sink (KmsRecorderEndpoint * self)
{
  gulong *probe_id;
  GstElement *sink;
  GstPad *sinkpad;

  sink = kms_recorder_endpoint_get_sink (self);

  if (sink == NULL) {
    sink = gst_element_factory_make ("fakesink", NULL);
    GST_ELEMENT_ERROR (self, STREAM, WRONG_TYPE, ("No available sink"), (NULL));
    return sink;
  }

  sinkpad = gst_element_get_static_pad (sink, "sink");
  if (sinkpad == NULL) {
    GST_WARNING ("No sink pad available for element %" GST_PTR_FORMAT, sink);
    return sink;
  }

  probe_id = g_slice_new0 (gulong);
  *probe_id = gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      stop_notification_cb, self, NULL);
  g_object_set_data_full (G_OBJECT (sinkpad), KEY_RECORDER_PAD_PROBE_ID,
      probe_id, destroy_ulong);
  g_object_unref (sinkpad);

  return sink;
}
Code example #10
static void
qtdemux_pad_added_cb (GstElement * element, GstPad * pad, CommonTestData * data)
{
  data->srcpad = pad;
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
      (GstPadProbeCallback) qtdemux_probe, data, NULL);
}
Code example #11
static void
sink_required_cb (KmsConfController * controller, gpointer recorder)
{
  KmsRecorderEndPoint *self = KMS_RECORDER_END_POINT (recorder);
  gulong *probe_id;
  GstElement *sink;
  GstPad *sinkpad;

  sink = kms_recorder_end_point_get_sink (self);

  if (sink == NULL) {
    sink = gst_element_factory_make ("fakesink", NULL);
    GST_ELEMENT_ERROR (self, STREAM, WRONG_TYPE, ("No available sink"), (NULL));
    return;
  }

  g_object_set (self->priv->controller, "sink", sink, NULL);

  sinkpad = gst_element_get_static_pad (sink, "sink");
  if (sinkpad == NULL) {
    GST_WARNING ("No sink pad available for element %" GST_PTR_FORMAT, sink);
    return;
  }

  probe_id = g_slice_new0 (gulong);
  *probe_id = gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      stop_notification_cb, self, NULL);
  g_object_set_data_full (G_OBJECT (sinkpad), KEY_RECORDER_PAD_PROBE_ID,
      probe_id, destroy_ulong);
  g_object_unref (sinkpad);
}
Code example #12
static KmsAlphaBlendingData *
kms_alpha_blending_port_data_create (KmsAlphaBlending * mixer, gint id)
{
  KmsAlphaBlendingData *data;
  gchar *padname;

  data = kms_create_alpha_blending_data ();
  data->id = id;
  data->mixer = mixer;
  data->videoconvert = gst_element_factory_make ("videoconvert", NULL);

  gst_bin_add_many (GST_BIN (mixer), data->videoconvert, NULL);

  gst_element_sync_state_with_parent (data->videoconvert);

  /*link basemixer -> video_agnostic */
  kms_base_hub_link_video_sink (KMS_BASE_HUB (mixer), id, data->videoconvert,
      "sink", FALSE);

  padname = g_strdup_printf (AUDIO_SINK_PAD, id);
  kms_base_hub_link_audio_sink (KMS_BASE_HUB (mixer), id,
      mixer->priv->audiomixer, padname, FALSE);
  g_free (padname);

  data->videoconvert_sink_pad = gst_element_get_static_pad (data->videoconvert,
      "sink");

  data->link_probe_id = gst_pad_add_probe (data->videoconvert_sink_pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_BLOCK,
      (GstPadProbeCallback) link_to_videomixer,
      KMS_ALPHA_BLENDING_REF (data), (GDestroyNotify) kms_ref_struct_unref);

  return data;
}
Code example #13
static void
post_decodebin_pad_added_handler (GstElement * decodebin, GstPad * pad,
    KmsHttpEndpoint * self)
{
  GstElement *appsink;
  GstPad *sinkpad;

  GST_DEBUG_OBJECT (pad, "Pad added");

  /* Create appsink and link to pad */
  appsink = gst_element_factory_make ("appsink", NULL);
  g_object_set (appsink, "sync", TRUE, "enable-last-sample",
      FALSE, "emit-signals", TRUE, "qos", FALSE, "max-buffers", 1,
      "async", FALSE, NULL);
  gst_bin_add (GST_BIN (self->pipeline), appsink);

  sinkpad = gst_element_get_static_pad (appsink, "sink");
  if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) {
    GST_ERROR_OBJECT (self, "Can not link %" GST_PTR_FORMAT " to %"
        GST_PTR_FORMAT, decodebin, appsink);
  }

  gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      set_appsrc_caps, self, NULL);

  g_object_unref (sinkpad);

  g_object_set_qdata (G_OBJECT (pad), appsink_data_quark (), appsink);

  gst_element_sync_state_with_parent (appsink);
}
Code example #14
File: dvbbasebin.c Project: drothlis/gst-plugins-bad
static void
dvb_base_bin_init_cam (DvbBaseBin * dvbbasebin)
{
  gint adapter;
  gchar *ca_file;

  g_object_get (dvbbasebin->dvbsrc, "adapter", &adapter, NULL);
  /* TODO: handle multiple cams */
  ca_file = g_strdup_printf ("/dev/dvb/adapter%d/ca0", adapter);
  if (g_file_test (ca_file, G_FILE_TEST_EXISTS)) {
    dvbbasebin->hwcam = cam_device_new ();
    if (cam_device_open (dvbbasebin->hwcam, ca_file)) {
      /* HACK: poll the cam in a buffer probe */
      dvbbasebin->ts_pad =
          gst_element_get_request_pad (dvbbasebin->mpegtsparse, "src_%u");
      gst_pad_add_probe (dvbbasebin->ts_pad,
          GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, dvb_base_bin_ts_pad_probe_cb,
          dvbbasebin, NULL);
    } else {
      GST_ERROR_OBJECT (dvbbasebin, "could not open %s", ca_file);
      cam_device_free (dvbbasebin->hwcam);
      dvbbasebin->hwcam = NULL;
    }
  }

  g_free (ca_file);
}
Code example #15
File: queue.c Project: sebras/gstreamer
static void
block_src (void)
{
  qsrcpad = gst_element_get_static_pad (queue, "src");
  probe_id = gst_pad_add_probe (qsrcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      NULL, NULL, NULL);
}
Code example #16
static void
setup_input_selector_with_2_streams (gint active_stream)
{
  eos_received = FALSE;
  g_mutex_init (&eos_probe_lock);
  g_cond_init (&eos_probe_cond);

  selector = gst_check_setup_element ("input-selector");
  output_pad = gst_check_setup_sink_pad (selector, &sinktemplate);

  gst_pad_set_active (output_pad, TRUE);
  stream1_pad = setup_input_pad (selector);
  stream2_pad = setup_input_pad (selector);

  if (active_stream == 1) {
    g_object_set (selector, "active-pad", GST_PAD_PEER (stream1_pad), NULL);
  } else {
    g_object_set (selector, "active-pad", GST_PAD_PEER (stream2_pad), NULL);
  }

  eos_probe =
      gst_pad_add_probe (output_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      eos_pushed_probe, NULL, NULL);

  fail_unless (gst_element_set_state (selector,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS,
      "could not set to playing");

  gst_check_setup_events_with_stream_id (stream1_pad, selector, NULL,
      GST_FORMAT_TIME, "stream-1-id");
  gst_check_setup_events_with_stream_id (stream2_pad, selector, NULL,
      GST_FORMAT_TIME, "stream-2-id");
}
Code example #17
void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    GRefPtr<GstElement> videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get()));

    // Block data flow towards the pipeline branch to remove. No need
    // for pad blocking if the pipeline is paused.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
#ifdef GST_API_VERSION_1
    if (state >= GST_STATE_PLAYING)
        gst_pad_add_probe(srcPad.get(), GST_PAD_PROBE_TYPE_IDLE, reinterpret_cast<GstPadProbeCallback>(gstGWorldPadProbeCallback), this, 0);
    else
#else
    if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad.get(), true))
#endif
        removePlatformVideoSink();

    m_videoWindow = 0;
}
Code example #18
File: main.c Project: jcaden/tee_test
static gboolean
connect_branch (gpointer pipeline)
{
  GstElement *tee = gst_bin_get_by_name (GST_BIN (pipeline), "tee");
  GstElement *queue, *sink;
  GstPad *tee_src;

  if (tee == NULL) {
    g_atomic_int_set (&error, TRUE);
    goto end;
  }

  queue = gst_element_factory_make ("queue", NULL);
  sink = gst_element_factory_make ("appsink", NULL);

  g_object_set (G_OBJECT (sink), "emit-signals", TRUE, "sync", FALSE, NULL);
  g_signal_connect_data (G_OBJECT (sink), "new-sample", G_CALLBACK (new_sample),
      NULL, NULL, 0);

  gst_bin_add_many (GST_BIN (pipeline), queue, sink, NULL);
  gst_element_link (queue, sink);
  gst_element_sync_state_with_parent (queue);
  gst_element_sync_state_with_parent (sink);

  tee_src = gst_element_get_request_pad (tee, "src_%u");
  gst_pad_add_probe (tee_src, GST_PAD_PROBE_TYPE_BLOCKING, link_to_tee,
      g_object_ref (queue), g_object_unref);

  g_object_unref (tee);

end:
  return G_SOURCE_REMOVE;
}
Code example #19
void NWaveformBuilderGstreamer::start(const QString &file)
{
	stop();

	if (peaksFindFromCache(file))
		return;
	if (!QFileInfo(file).exists())
		return;
	m_currentFile = file;

	m_playbin = gst_parse_launch("uridecodebin name=w_uridecodebin \
	                              ! audioconvert ! audio/x-raw, format=S16LE \
	                              ! fakesink name=w_sink", NULL);

	gchar *uri = g_filename_to_uri(QFileInfo(file).absoluteFilePath().toUtf8().constData(), NULL, NULL);
	GstElement *uridecodebin = gst_bin_get_by_name(GST_BIN(m_playbin), "w_uridecodebin");
	g_object_set(uridecodebin, "uri", uri, NULL);
	gst_object_unref(uridecodebin);

	GstElement *sink = gst_bin_get_by_name(GST_BIN(m_playbin), "w_sink");
	GstPad *pad = gst_element_get_static_pad(sink, "sink");
	gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)_handleBuffer, this, NULL);
	gst_object_unref(sink);
	gst_object_unref(pad);

	reset();
	QThread::start();

	if (!m_timer->isActive())
		m_timer->start(100);

	gst_element_set_state(m_playbin, GST_STATE_PLAYING);
}
Code example #20
static DecodebinInputStream *
create_input_stream (GstDecodebin3 * dbin, GstStream * stream, GstPad * pad,
    DecodebinInput * input)
{
  DecodebinInputStream *res = g_new0 (DecodebinInputStream, 1);

  GST_DEBUG_OBJECT (pad, "Creating input stream for stream %p %s (input:%p)",
      stream, gst_stream_get_stream_id (stream), input);

  res->dbin = dbin;
  res->input = input;
  res->pending_stream = gst_object_ref (stream);
  res->srcpad = pad;

  /* Put probe on output source pad (for detecting EOS/STREAM_START/FLUSH) */
  res->output_event_probe_id =
      gst_pad_add_probe (pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM
      | GST_PAD_PROBE_TYPE_EVENT_FLUSH,
      (GstPadProbeCallback) parse_chain_output_probe, res, NULL);

  /* Add to list of current input streams */
  dbin->input_streams = g_list_append (dbin->input_streams, res);
  GST_DEBUG_OBJECT (pad, "Done creating input stream");

  return res;
}
Code example #21
static void
setup_input_selector_counters (GstElement * element)
{
  GstIterator *iterator;
  gboolean done = FALSE;
  GValue value = { 0, };
  GstPad *pad;
  BufferCountData *bcd;

  iterator = gst_element_iterate_pads (element);
  while (!done) {
    switch (gst_iterator_next (iterator, &value)) {
      case GST_ITERATOR_OK:
        pad = g_value_dup_object (&value);
        bcd = g_slice_new0 (BufferCountData);
        g_object_set_data (G_OBJECT (pad), "buffer-count-data", bcd);
        bcd->probe_id = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
            (GstPadProbeCallback) input_selector_pad_probe, NULL, NULL);
        bcd->pad = pad;
        g_value_reset (&value);
        break;
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (iterator);
        break;
      case GST_ITERATOR_ERROR:
        done = TRUE;
        break;
      case GST_ITERATOR_DONE:
        done = TRUE;
        break;
    }
  }
  gst_iterator_free (iterator);
}
Code example #22
static inline gboolean
ges_track_video_transition_set_transition_type_internal (GESTrackVideoTransition
    * self, GESVideoStandardTransitionType type)
{
  GESTrackVideoTransitionPrivate *priv = self->priv;

  GST_DEBUG ("%p %d => %d", self, priv->type, type);

  if (type == priv->type && !priv->pending_type) {
    GST_INFO ("This type is already set on this transition\n");
    return TRUE;
  }

  if (type == priv->pending_type) {
    GST_INFO ("This type is already pending for this transition\n");
    return TRUE;
  }

  if (priv->type &&
      ((priv->type != type) || (priv->type != priv->pending_type)) &&
      ((type == GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE) ||
          (priv->type == GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE))) {
    GstPad *pad = gst_element_get_static_pad (priv->topbin, "sinka");

    priv->pending_type = type;
    if (type != GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE) {
      if (!priv->topbin)
        return FALSE;
      priv->smpte = NULL;
      gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_IDLE,
          (GstPadProbeCallback) switch_to_smpte_cb, self, NULL);
    } else {
      if (!priv->topbin)
        return FALSE;
      priv->start_value = 1.0;
      priv->end_value = 0.0;
      gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_IDLE,
          (GstPadProbeCallback) switch_to_crossfade_cb, self, NULL);
    }
    return TRUE;
  }
  priv->pending_type = type;
  if (priv->smpte && (type != GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE)) {
    g_object_set (priv->smpte, "type", (gint) type, NULL);
  }
  return TRUE;
}
Code example #23
File: util.c Project: rodrimc/libmicromb
void
pad_added_cb (GstElement *src, GstPad *new_pad, MbMedia *media)
{
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  GstPad *peer = NULL;
  const gchar *new_pad_type = NULL;
  gboolean success = FALSE;

  g_assert (media);

  g_debug ("Received new pad '%s' from '%s'\n", GST_PAD_NAME(new_pad),
      media->name);

  new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);

  g_debug ("New pad type: %s\n", new_pad_type);

  g_mutex_lock(&(media->mutex));

  media->valid_pads++;

  if (g_str_has_prefix(new_pad_type, "video"))
  {
    success = set_video_bin (media->bin, media, new_pad);

    if (success)
      peer = gst_element_get_static_pad(_mb_global_data.video_mixer,
          media->video_pad_name);
  }
  else if (g_str_has_prefix(new_pad_type, "audio"))
  {
    success = set_audio_bin (media->bin, media, new_pad);

    if (success)
      peer = gst_element_get_static_pad(_mb_global_data.audio_mixer,
          media->audio_pad_name);
  }

  if (success)
  {
    gst_pad_set_offset (new_pad, media->start_offset);

    if (peer != NULL)
    {
      gst_pad_add_probe (peer, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
          eos_event_cb, media, NULL);

      gst_object_unref(peer);
    }
  }

  g_mutex_unlock(&(media->mutex));

  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);
}
Code example #24
static gboolean
connect_pads (gpointer user_data)
{
  KmsConnectData *data = user_data;

  CONNECT_DATA_LOCK (data);

  if (!data->audio_connected && data->audiosrc != NULL &&
      data->audiosink != NULL) {
    data->audio_connected = gst_pad_link (data->audiosrc, data->audiosink) ==
        GST_PAD_LINK_OK;
    fail_unless (data->audio_connected, "Could not connect audio pads");
    GST_DEBUG ("Connected audio stream");
    if (data->audio_probe != NULL) {
      gst_pad_add_probe (data->audiosink, GST_PAD_PROBE_TYPE_BUFFER,
          (GstPadProbeCallback) audio_probe_cb, data, NULL);
    }
  }

  if (!data->video_connected && data->videosrc != NULL &&
      data->videosink != NULL) {
    data->video_connected = gst_pad_link (data->videosrc, data->videosink) ==
        GST_PAD_LINK_OK;
    fail_unless (data->video_connected, "Could not connect video pads");
    GST_DEBUG ("Connected video stream");
    if (data->video_probe != NULL) {
      gst_pad_add_probe (data->videosink, GST_PAD_PROBE_TYPE_BUFFER,
          (GstPadProbeCallback) video_probe_cb, data, NULL);
    }
  }

  if (!data->data_connected && data->datasrc != NULL && data->datasink != NULL) {
    data->data_connected = gst_pad_link (data->datasrc, data->datasink) ==
        GST_PAD_LINK_OK;
    fail_unless (data->data_connected, "Could not connect data pads");
    GST_DEBUG ("Connected data stream");
    if (data->audio_probe != NULL) {
      gst_pad_add_probe (data->datasink, GST_PAD_PROBE_TYPE_BUFFER,
          (GstPadProbeCallback) data_probe_cb, data, NULL);
    }
  }

  CONNECT_DATA_UNLOCK (data);

  return G_SOURCE_REMOVE;
}
Code example #25
static gboolean
timeout_cb (gpointer user_data)
{
  gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      pad_probe_cb, user_data, NULL);

  return TRUE;
}
Code example #26
InbandTextTrackPrivateGStreamer::InbandTextTrackPrivateGStreamer(gint index, GRefPtr<GstPad> pad)
    : InbandTextTrackPrivate(WebVTT), TrackPrivateBaseGStreamer(this, index, pad)
{
    m_eventProbe = gst_pad_add_probe(m_pad.get(), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
        reinterpret_cast<GstPadProbeCallback>(textTrackPrivateEventCallback), this, 0);

    notifyTrackOfStreamChanged();
}
Code example #27
void
MediaSinkImpl::unlinkUnchecked (GstPad *sink)
{
  GstPad *peer;
  GstPad *sinkPad;

  if (sink == NULL) {
    sinkPad = gst_element_get_static_pad (getGstreamerElement(),
                                          getPadName().c_str() );
  } else {
    sinkPad = sink;
  }

  if (sinkPad == NULL) {
    return;
  }

  peer = gst_pad_get_peer (sinkPad);

  if (peer != NULL) {
    Glib::Threads::Cond cond;
    Glib::Threads::Mutex cmutex;
    bool blocked = FALSE;
    std::function <void (GstPad *, GstPadProbeInfo *) >
    blockedLambda = [&] (GstPad * pad, GstPadProbeInfo * info) {
      Glib::Threads::Mutex::Lock lock (cmutex);

      GST_DEBUG ("Peer pad blocked %" GST_PTR_FORMAT, pad);

      if (blocked) {
        return;
      }

      gst_pad_unlink (pad, sinkPad);
      blocked = TRUE;

      cond.signal();
    };

    gst_pad_add_probe (peer, (GstPadProbeType) (GST_PAD_PROBE_TYPE_BLOCKING),
                       pad_blocked_adaptor, &blockedLambda, NULL);

    cmutex.lock ();

    while (!blocked) {
      cond.wait (cmutex);
    }

    cmutex.unlock ();

    g_object_unref (peer);
  }

  if (sink == NULL) {
    gst_element_release_request_pad (getGstreamerElement(), sinkPad);
    g_object_unref (sinkPad);
  }
}
Code example #28
static void
block_proxypad (GstPlaySinkConvertBin * self)
{
  if (self->sink_proxypad_block_id == 0) {
    self->sink_proxypad_block_id =
        gst_pad_add_probe (self->sink_proxypad,
        GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, pad_blocked_cb, self, NULL);
  }
}
Code example #29
File: vorbistag.c Project: rawoul/gst-plugins-base
static void
start_pipeline (GstElement * element)
{
  id = gst_pad_add_probe (mysinkpad, GST_PAD_PROBE_TYPE_BUFFER,
      (GstPadProbeCallback) buffer_probe, NULL, NULL);

  pending_buffers = g_async_queue_new ();
  gst_element_set_state (element, GST_STATE_PLAYING);
}
Code example #30
File: metadata.c Project: DavidYangfei/kms-core
static void
set_probe_on_pad (GstElement * e, const gchar * pad_name,
    GstPadProbeCallback callback)
{
  GstPad *pad;

  pad = gst_element_get_static_pad (e, pad_name);
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, callback, NULL, NULL);
  g_object_unref (pad);
}
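
Every snippet above registers a GstPadProbeCallback (events_cb, buffer_probe, stop_notification_cb, and so on) whose body is not shown. For reference, here is a minimal sketch of what such a callback and its registration typically look like in GStreamer 1.x; the names on_sink_event, watch_sink_events and app_data are illustrative assumptions, not code taken from any of the projects listed above.

#include <gst/gst.h>

/* Minimal sketch of a downstream-event probe: inspect the event carried in
 * the probe info and decide whether to keep or detach the probe. */
static GstPadProbeReturn
on_sink_event (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
    /* react to end-of-stream here (notify the application, flush state, ...) */
    return GST_PAD_PROBE_REMOVE;        /* done: detach this probe */
  }

  return GST_PAD_PROBE_OK;              /* let all other data pass through */
}

/* Registration, mirroring the gst_pad_add_probe() calls in the examples
 * above; the returned id can later be passed to gst_pad_remove_probe(). */
static gulong
watch_sink_events (GstElement * sink, gpointer app_data)
{
  GstPad *pad = gst_element_get_static_pad (sink, "sink");
  gulong id = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
      on_sink_event, app_data, NULL);

  gst_object_unref (pad);
  return id;
}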