Exemplo n.º 1
0
/*
 * "pad-added" signal handler: wires a newly exposed source pad into the
 * matching mixer branch.
 *
 * Flow: inspect the pad's caps to decide audio vs. video, hand the pad to
 * set_video_bin()/set_audio_bin() under the media mutex, and on success
 * apply the media's start offset and install an EOS probe on the mixer's
 * request pad so end-of-stream can be detected per-media.
 *
 * src:     element that emitted the signal (unused here).
 * new_pad: the pad that just appeared on 'src'.
 * media:   bookkeeping record for this media; must be non-NULL.
 */
void
pad_added_cb (GstElement *src, GstPad *new_pad, MbMedia *media)
{
  GstCaps *caps = NULL;
  GstStructure *caps_struct = NULL;
  const gchar *pad_type = NULL;
  GstPad *mixer_pad = NULL;
  gboolean linked = FALSE;

  g_assert (media);

  g_debug ("Received new pad '%s' from '%s'\n", GST_PAD_NAME(new_pad),
      media->name);

  /* Classify the pad by the name of its first caps structure
     (e.g. "video/x-raw" or "audio/x-raw"). */
  caps = gst_pad_query_caps (new_pad, NULL);
  caps_struct = gst_caps_get_structure (caps, 0);
  pad_type = gst_structure_get_name (caps_struct);

  g_debug ("New pad type: %s\n", pad_type);

  g_mutex_lock (&(media->mutex));

  /* Counted for every pad we see, linked or not. */
  media->valid_pads++;

  if (g_str_has_prefix (pad_type, "video"))
  {
    linked = set_video_bin (media->bin, media, new_pad);

    if (linked)
      mixer_pad = gst_element_get_static_pad (_mb_global_data.video_mixer,
          media->video_pad_name);
  }
  else if (g_str_has_prefix (pad_type, "audio"))
  {
    linked = set_audio_bin (media->bin, media, new_pad);

    if (linked)
      mixer_pad = gst_element_get_static_pad (_mb_global_data.audio_mixer,
          media->audio_pad_name);
  }

  if (linked)
  {
    /* Shift this stream's timestamps so it starts at the scheduled time. */
    gst_pad_set_offset (new_pad, media->start_offset);

    if (mixer_pad != NULL)
    {
      /* Watch downstream events on the mixer side to catch EOS. */
      gst_pad_add_probe (mixer_pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
          eos_event_cb, media, NULL);

      gst_object_unref (mixer_pad);
    }
  }

  g_mutex_unlock (&(media->mutex));

  if (caps != NULL)
    gst_caps_unref (caps);
}
Exemplo n.º 2
0
/*
 * decodebin "pad-added" handler for the intro bin: links the new decoded
 * pad to the proper branch inside 'introbin' ("introscale" for video,
 * "introaudiorate" for audio) and offsets the pad so its timestamps line
 * up with the running pipeline.
 *
 * introdec: the decodebin that produced the pad (unused).
 * srcpad:   freshly added decoded source pad.
 * data:     CustomData* carrying the pipeline and introbin.
 *
 * Fixes vs. the original: gst_caps_to_string() returns a newly allocated
 * string the caller must g_free() — the original leaked one copy per call
 * (three calls). It also leaked the caps ref from gst_pad_query_caps()
 * and the sink-pad refs from gst_element_get_static_pad().
 */
static void on_new_decoded_pad(GstElement *introdec,
                               GstPad *srcpad,
                               gpointer data)
{
   GstPadLinkReturn result;
   GstPad *sinkpad;
   GstCaps *new_pad_caps;
   gchar *caps_str;

   CustomData *cdata=(CustomData*)data;
   GstElement *introbin=cdata->introbin;

   new_pad_caps=gst_pad_query_caps(srcpad,NULL);
   /* One owned copy of the caps string; reused below and freed at the end. */
   caps_str=gst_caps_to_string(new_pad_caps);
   g_print("Caps:%s\n",caps_str);

   /* Setup src pad offset, sync with pipeline.
      Running time = clock time - base time; applying it as the pad offset
      makes the intro stream start "now" relative to the pipeline. */
   gint64 pos2;
   pos2=gst_element_get_base_time(cdata->pipeline);
   GstClock *clock;
   clock=gst_pipeline_get_clock(GST_PIPELINE(cdata->pipeline));
   GstClockTime clock_time;
   clock_time=gst_clock_get_time(clock);
   gst_object_unref(clock);
   gst_pad_set_offset(srcpad,clock_time-pos2);
   cdata->introbin_offset=clock_time-pos2;

   if(strncmp(caps_str,"video",5)==0) {
       GstElement *vqueue;
       vqueue=gst_bin_get_by_name(GST_BIN(introbin),"introscale");
       sinkpad=gst_element_get_static_pad(vqueue,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          g_printerr("Couldn't link introbin decodebin video pad...\n");
       }
       gst_object_unref(sinkpad);
       gst_object_unref(vqueue);
   }
   if(strncmp(caps_str,"audio",5)==0) {
       GstElement *arate;
       arate=gst_bin_get_by_name(GST_BIN(introbin),"introaudiorate");
       sinkpad=gst_element_get_static_pad(arate,"sink");
       result=gst_pad_link(srcpad,sinkpad);
       if(result!=GST_PAD_LINK_OK) {
          GstCaps *peer_caps;
          gchar *peer_caps_str;
          peer_caps=gst_pad_query_caps(sinkpad,NULL);
          peer_caps_str=gst_caps_to_string(peer_caps);
          g_print("SinkCaps:%s\n",peer_caps_str);
          g_printerr("Couldn't link introbin decodebin audio pad...\n");
          g_free(peer_caps_str);
          gst_caps_unref(peer_caps);
       }
       gst_object_unref(sinkpad);
       gst_object_unref(arate);
   }

   g_free(caps_str);
   gst_caps_unref(new_pad_caps);
}
Exemplo n.º 3
0
/*
 * Applies the pipeline's current running time (clock time - base time) as
 * a timestamp offset on both decodebin source pads ("src_0", "src_1") of
 * the intro bin, so the intro starts in sync with the live pipeline.
 *
 * data: CustomData* carrying the pipeline and introbin.
 * Returns TRUE (callers use it as a one-shot GSourceFunc-style callback).
 *
 * Fixes vs. the original: the ref returned by gst_bin_get_by_name() was
 * leaked; gst_element_get_static_pad() results were not NULL-checked
 * (the pads only exist after decodebin has exposed them); and "%lld" is
 * not portable for gint64/GstClockTime — use GLib's format macros.
 */
gboolean introbin_set_pad_offset(CustomData *data)
{
  gint64 base_time;
  GstClock *clock;
  GstClockTime clock_time;

  base_time = gst_element_get_base_time(data->pipeline);
  clock = gst_pipeline_get_clock(GST_PIPELINE(data->pipeline));
  clock_time = gst_clock_get_time(clock);
  gst_object_unref(clock);

  g_print("Pipeline times: base_time=%" G_GINT64_FORMAT
          "\n clock_time=%" G_GUINT64_FORMAT,
          base_time, clock_time);

  GstElement *dec = gst_bin_get_by_name(GST_BIN(data->introbin), "introdec");
  if (dec == NULL) {
    g_printerr("introbin_set_pad_offset: no 'introdec' in introbin\n");
    return TRUE;
  }

  /* Same offset on both decoded streams (audio + video). */
  static const gchar *pad_names[] = { "src_0", "src_1" };
  guint i;
  for (i = 0; i < G_N_ELEMENTS(pad_names); i++) {
    GstPad *src_pad = gst_element_get_static_pad(GST_ELEMENT(dec),
                                                 pad_names[i]);
    if (src_pad != NULL) {
      gst_pad_set_offset(src_pad, clock_time - base_time);
      gst_object_unref(src_pad);
    } else {
      /* Pad not exposed yet — decodebin pads appear dynamically. */
      g_printerr("introbin_set_pad_offset: pad '%s' not found\n",
                 pad_names[i]);
    }
  }

  gst_object_unref(dec);

  return TRUE;
}
Exemplo n.º 4
0
// Pad probe installed on the decodebin's source pad.
//
// Two jobs:
//  * On buffers: track the end time (timestamp + duration) of the most
//    recent buffer in last_decodebin_segment_.position, so the next
//    decodebin's output can be offset to butt up against it exactly.
//  * On downstream events: cache new SEGMENT events (needed to convert
//    positions to running time later) and clear any pad offset when a
//    flushing seek resets the running time to zero.
GstPadProbeReturn GstEnginePipeline::DecodebinProbe(GstPad* pad,
        GstPadProbeInfo* info,
        gpointer data) {
    GstEnginePipeline* pipeline = reinterpret_cast<GstEnginePipeline*>(data);
    const GstPadProbeType probe_type = GST_PAD_PROBE_INFO_TYPE(info);

    if (probe_type & GST_PAD_PROBE_TYPE_BUFFER) {
        // Record where this buffer ends so the next song can be lined up
        // against it.
        GstBuffer* const buffer = GST_PAD_PROBE_INFO_BUFFER(info);
        const GstClockTime duration = GST_BUFFER_DURATION(buffer);

        GstClockTime end_time = GST_BUFFER_TIMESTAMP(buffer);
        if (end_time == GST_CLOCK_TIME_NONE) {
            // No timestamp on the buffer: fall back to the last known
            // position.
            end_time = pipeline->last_decodebin_segment_.position;
        }
        if (duration != GST_CLOCK_TIME_NONE) {
            end_time += duration;
        }

        pipeline->last_decodebin_segment_.position = end_time;
    } else if (probe_type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
        GstEvent* const event = GST_PAD_PROBE_INFO_EVENT(info);

        switch (GST_EVENT_TYPE(event)) {
            case GST_EVENT_SEGMENT:
                // Remember the segment for later running-time conversions.
                gst_event_copy_segment(event,
                                       &pipeline->last_decodebin_segment_);
                break;
            case GST_EVENT_FLUSH_START:
                // A flushing seek resets running time to 0 — drop any
                // offset previously applied to this pad.
                gst_pad_set_offset(pad, 0);
                break;
            default:
                break;
        }
    }

    return GST_PAD_PROBE_OK;
}
Exemplo n.º 5
0
// decodebin "pad-added" handler: links the new decoded pad into audiobin,
// offsets its timestamps so playback continues seamlessly from the previous
// decodebin, and installs DecodebinProbe to keep segment bookkeeping.
//
// Fix vs. the original: gst_pad_unlink() takes (srcpad, sinkpad). audiopad
// is audiobin's SINK pad, so the old call
// gst_pad_unlink(audiopad, GST_PAD_PEER(audiopad)) had the arguments
// reversed and would fail its direction check, leaving the stale link in
// place. Also guard gst_pad_set_offset() against GST_CLOCK_TIME_NONE,
// which gst_segment_to_running_time() returns for out-of-segment positions.
void GstEnginePipeline::NewPadCallback(GstElement*, GstPad* pad,
                                       gpointer self) {
    GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
    GstPad* const audiopad =
        gst_element_get_static_pad(instance->audiobin_, "sink");

    // Link decodebin's sink pad to audiobin's src pad.
    if (GST_PAD_IS_LINKED(audiopad)) {
        qLog(Warning) << instance->id()
                      << "audiopad is already linked, unlinking old pad";
        // (srcpad, sinkpad) order: the peer is the source side.
        gst_pad_unlink(GST_PAD_PEER(audiopad), audiopad);
    }

    gst_pad_link(pad, audiopad);
    gst_object_unref(audiopad);

    // Offset the timestamps on all the buffers coming out of the decodebin so
    // they line up exactly with the end of the last buffer from the old
    // decodebin.
    // "Running time" is the time since the last flushing seek.
    GstClockTime running_time = gst_segment_to_running_time(
                                    &instance->last_decodebin_segment_, GST_FORMAT_TIME,
                                    instance->last_decodebin_segment_.position);
    if (running_time != GST_CLOCK_TIME_NONE) {
        gst_pad_set_offset(pad, running_time);
    }

    // Add a probe to the pad so we can update last_decodebin_segment_.
    gst_pad_add_probe(
        pad, static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_BUFFER |
                                          GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM |
                                          GST_PAD_PROBE_TYPE_EVENT_FLUSH),
        DecodebinProbe, instance, nullptr);

    instance->pipeline_is_connected_ = true;
    // Replay any seek that was requested before the pipeline was ready.
    if (instance->pending_seek_nanosec_ != -1 &&
            instance->pipeline_is_initialised_) {
        QMetaObject::invokeMethod(instance, "Seek", Qt::QueuedConnection,
                                  Q_ARG(qint64, instance->pending_seek_nanosec_));
    }
}