Code Example #1
static GstCaps *
gst_play_sink_audio_convert_getcaps (GstPad * pad)
{
  GstPlaySinkAudioConvert *self =
      GST_PLAY_SINK_AUDIO_CONVERT (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer = NULL;

  GST_PLAY_SINK_AUDIO_CONVERT_LOCK (self);
  otherpad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (pad));
  GST_PLAY_SINK_AUDIO_CONVERT_UNLOCK (self);

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    gst_object_unref (otherpad);
    otherpad = NULL;
  }

  if (peer) {
    ret = gst_pad_get_caps_reffed (peer);
    gst_object_unref (peer);
  } else {
    ret = gst_caps_new_any ();
  }

  gst_object_unref (self);

  return ret;
}
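Most of the examples on this page follow the same pattern: take a reference to a pad's caps with gst_pad_get_caps_reffed(), inspect or intersect the result, and drop the reference with gst_caps_unref(). The helper below is a minimal sketch of that pattern only, not code from any of the listed projects; pad_supports_caps is a hypothetical name, and GStreamer 0.10 (where gst_pad_get_caps_reffed() is available) is assumed.

/* Sketch of the recurring check used in several examples below: query the
 * pad's caps, test whether they can intersect with a candidate, and always
 * release the reference. */
static gboolean
pad_supports_caps (GstPad * pad, GstCaps * candidate)
{
  GstCaps *padcaps;
  gboolean ok = FALSE;

  padcaps = gst_pad_get_caps_reffed (pad);   /* caller now owns a reference */
  if (padcaps != NULL) {
    ok = gst_caps_can_intersect (padcaps, candidate);
    gst_caps_unref (padcaps);                /* drop it on every path */
  }

  return ok;
}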
Code Example #2
File: gsttffilter.c  Project: Sunmonds/wine
static GstFlowReturn request_buffer(GstPad *pad, guint64 ofs, guint size, GstCaps *caps, GstBuffer **buf) {
    GstTfImpl *This = gst_pad_get_element_private(pad);
    IMediaSample *sample;
    BYTE *ptr;
    HRESULT hr;
    TRACE("Requesting buffer\n");

    hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin*)This->tf.ppPins[1], &sample, NULL, NULL, 0);
    if (FAILED(hr)) {
        ERR("Could not get output buffer: %08x\n", hr);
        return GST_FLOW_WRONG_STATE;
    }
    IMediaSample_SetActualDataLength(sample, size);
    IMediaSample_GetPointer(sample, &ptr);
    *buf = gst_app_buffer_new(ptr, size, release_sample, sample);

    if (!*buf) {
        IMediaSample_Release(sample);
        ERR("Out of memory\n");
        return GST_FLOW_ERROR;
    }
    if (!caps)
        caps = gst_pad_get_caps_reffed(This->my_sink);
    gst_buffer_set_caps(*buf, caps);
    return GST_FLOW_OK;
}
Code Example #3
static GstCaps *
gst_play_sink_video_convert_getcaps (GstPad * pad)
{
    GstPlaySinkVideoConvert *self =
        GST_PLAY_SINK_VIDEO_CONVERT (gst_pad_get_parent (pad));
    GstCaps *ret;
    GstPad *otherpad, *peer;

    GST_PLAY_SINK_VIDEO_CONVERT_LOCK (self);
    if (pad == self->srcpad)
        otherpad = gst_object_ref (self->sinkpad);
    else
        otherpad = gst_object_ref (self->srcpad);
    GST_PLAY_SINK_VIDEO_CONVERT_UNLOCK (self);

    peer = gst_pad_get_peer (otherpad);
    if (peer) {
        ret = gst_pad_get_caps_reffed (peer);
        gst_object_unref (peer);
    } else {
        ret = gst_caps_new_any ();
    }

    gst_object_unref (otherpad);
    gst_object_unref (self);

    return ret;
}
Code Example #4
File: gstalsasink.c  Project: pli3/gst-plugins-base
static gboolean
gst_alsasink_acceptcaps (GstPad * pad, GstCaps * caps)
{
    GstAlsaSink *alsa = GST_ALSA_SINK (gst_pad_get_parent_element (pad));
    GstCaps *pad_caps;
    GstStructure *st;
    gboolean ret = FALSE;
    GstRingBufferSpec spec = { 0 };

    pad_caps = gst_pad_get_caps_reffed (pad);
    if (pad_caps) {
        ret = gst_caps_can_intersect (pad_caps, caps);
        gst_caps_unref (pad_caps);
        if (!ret)
            goto done;
    }

    /* If we've not got fixed caps, creating a stream might fail, so let's just
     * return from here with default acceptcaps behaviour */
    if (!gst_caps_is_fixed (caps))
        goto done;

    /* parse helper expects this set, so avoid nasty warning
     * will be set properly later on anyway  */
    spec.latency_time = GST_SECOND;
    if (!gst_ring_buffer_parse_caps (&spec, caps))
        goto done;

    /* Make sure input is framed (one frame per buffer) and can be payloaded */
    switch (spec.type) {
    case GST_BUFTYPE_AC3:
    case GST_BUFTYPE_EAC3:
    case GST_BUFTYPE_DTS:
    case GST_BUFTYPE_MPEG:
    {
        gboolean framed = FALSE, parsed = FALSE;
        st = gst_caps_get_structure (caps, 0);

        gst_structure_get_boolean (st, "framed", &framed);
        gst_structure_get_boolean (st, "parsed", &parsed);
        if ((!framed && !parsed) || gst_audio_iec61937_frame_size (&spec) <= 0)
            goto done;
    }
    default: {
    }
    }
    ret = TRUE;

done:
    gst_caps_replace (&spec.caps, NULL);
    gst_object_unref (alsa);
    return ret;
}
Code Example #5
File: lgm-device.c  Project: fluendo/VAS
static void
lgm_device_fill_formats (LgmDevice * device, const gchar * prop_name)
{
  GstCaps *source_caps, *caps;
  GstElement *source;
  GstPad *pad;
  GHashTable *table;
  gint i;

  source = gst_element_factory_make (device->source_name, NULL);
  if (!g_strcmp0 (device->source_name, "decklinkvideosrc")) {
    g_object_set (source, prop_name, 0, NULL);
  } else {
    g_object_set (source, prop_name, device->device_name, NULL);
  }
  gst_element_set_state (source, GST_STATE_READY);
  gst_element_get_state (source, NULL, NULL, 5 * GST_SECOND);
  pad = gst_element_get_static_pad (source, "src");
  source_caps = gst_pad_get_caps_reffed (pad);
  caps = gst_caps_copy (source_caps);
  gst_caps_unref (source_caps);

  table = g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);

  GST_DEBUG ("Filling formats for source:%s device:%s", device->source_name,
      device->device_name);
  if (!g_strcmp0 (device->source_name, "decklinkvideosrc")) {
    lgm_device_add_format (table, 0, 0, 0, 0);
  } else {
    for (i = 0; i < gst_caps_get_size (caps); i++) {
      GstStructure *s;

      s = gst_caps_get_structure (caps, i);
      if (gst_structure_has_name (s, "video/x-raw-yuv") ||
          gst_structure_has_name (s, "video/x-raw-rgb")) {
        lgm_device_parse_structure (s, table);
      } else if (gst_structure_has_name (s, "video/x-dv")) {
        lgm_device_add_format (table, 0, 0, 0, 0);
      }
    }
  }
  device->formats = g_hash_table_get_values (table);
  device->formats = g_list_sort (device->formats,
      (GCompareFunc) lgm_device_video_format_compare);
  g_hash_table_unref (table);

  gst_element_set_state (source, GST_STATE_NULL);
  gst_element_get_state (source, NULL, NULL, 5 * GST_SECOND);
  gst_object_unref (pad);
  gst_caps_unref (caps);
}
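Example #5 copies the caps it receives and immediately drops the original reference. The sketch below isolates just that step, under the assumption that caps returned by gst_pad_get_caps_reffed() may be shared with the element and should not be modified in place; get_private_caps_copy is a hypothetical helper name and GStreamer 0.10 is assumed.

/* Sketch only (not from the project above): take the pad's caps, keep a
 * private copy that is safe to modify or store, and release the shared
 * reference right away. */
static GstCaps *
get_private_caps_copy (GstPad * pad)
{
  GstCaps *shared, *copy;

  shared = gst_pad_get_caps_reffed (pad);   /* shared reference, treat as read-only */
  if (shared == NULL)
    return NULL;

  copy = gst_caps_copy (shared);            /* private copy owned by the caller */
  gst_caps_unref (shared);

  return copy;
}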
Code Example #6
File: gsttffilter.c  Project: Sunmonds/wine
static HRESULT WINAPI Gstreamer_transform_ProcessData(TransformFilter *iface, IMediaSample *sample) {
    GstTfImpl *This = (GstTfImpl*)iface;
    REFERENCE_TIME tStart, tStop;
    BYTE *data;
    GstBuffer *buf;
    HRESULT hr;
    int ret;
    TRACE("Reading %p\n", sample);

    EnterCriticalSection(&This->tf.filter.csFilter);
    IMediaSample_GetPointer(sample, &data);
    buf = gst_app_buffer_new(data, IMediaSample_GetActualDataLength(sample), release_sample, sample);
    if (!buf) {
        LeaveCriticalSection(&This->tf.filter.csFilter);
        return S_OK;
    }
    gst_buffer_set_caps(buf, gst_pad_get_caps_reffed(This->my_src));
    IMediaSample_AddRef(sample);
    buf->duration = buf->timestamp = -1;
    hr = IMediaSample_GetTime(sample, &tStart, &tStop);
    if (SUCCEEDED(hr)) {
        buf->timestamp = tStart * 100;
        if (hr == S_OK)
            buf->duration = (tStop - tStart)*100;
    }
    if (IMediaSample_GetMediaTime(sample, &tStart, &tStop) == S_OK) {
        buf->offset = tStart * 100;
        buf->offset_end = tStop * 100;
    }
    if (IMediaSample_IsDiscontinuity(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DISCONT);
    if (IMediaSample_IsPreroll(sample) == S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_PREROLL);
    if (IMediaSample_IsSyncPoint(sample) != S_OK)
        GST_BUFFER_FLAG_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT);
    LeaveCriticalSection(&This->tf.filter.csFilter);
    ret = gst_pad_push(This->my_src, buf);
    if (ret)
        WARN("Sending returned: %i\n", ret);
    if (ret == GST_FLOW_ERROR)
        return E_FAIL;
    if (ret == GST_FLOW_WRONG_STATE)
        return VFW_E_WRONG_STATE;
    if (ret == GST_FLOW_RESEND)
        return S_FALSE;
    return S_OK;
}
Code Example #7
static gint
find_matching_pad (gconstpointer a, gconstpointer b)
{
  GstPad *pad = GST_PAD (a);
  GstCaps *caps = GST_CAPS (b);
  GstCaps *padcaps = NULL;
  gint ret = 1;

  padcaps = gst_pad_get_caps_reffed (pad);

  if (gst_caps_can_intersect (caps, padcaps))
    ret = 0;

  gst_caps_unref (padcaps);
  gst_object_unref (pad);

  return ret;
}
Code Example #8
static gboolean
gst_decklink_src_audio_src_acceptcaps (GstPad * pad, GstCaps * caps)
{
  GstDecklinkSrc *decklinksrc;
  GstCaps *pad_caps;
  gboolean can_intersect;

  decklinksrc = GST_DECKLINK_SRC (gst_pad_get_parent (pad));

  pad_caps = gst_pad_get_caps_reffed (pad);
  can_intersect = gst_caps_can_intersect (pad_caps, caps);
  gst_caps_unref (pad_caps);

  GST_DEBUG_OBJECT (decklinksrc, "acceptcaps");

  gst_object_unref (decklinksrc);
  return can_intersect;
}
Code Example #9
void GstVideoPlayerBackend::decodeAudioPadReady(GstDecodeBin *bin, GstPad *pad, gboolean islast)
{
    Q_UNUSED(islast);

    /* TODO: Better cap detection */
    GstCaps *caps = gst_pad_get_caps_reffed(pad);
    Q_ASSERT(caps);
    gchar *capsstr = gst_caps_to_string(caps);
    gst_caps_unref(caps);

    if (QByteArray(capsstr).contains("audio"))
    {
        qDebug("gstreamer: linking audio decoder to pipeline");
        if (!gst_element_link(GST_ELEMENT(bin), m_audioLink))
        {
            setError(false, tr("Building audio pipeline failed"));
            return;
        }
    }

    g_free(capsstr);
}
Code Example #10
static void
gst_wrapper_camera_bin_src_caps_cb (GObject * gobject, GParamSpec * pspec,
    gpointer user_data)
{
  GstBaseCameraSrc *bcamsrc = GST_BASE_CAMERA_SRC (user_data);
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (user_data);
  GstPad *src_caps_src_pad;
  GstCaps *caps = NULL;
  GstStructure *in_st = NULL;

  /* get the new caps that were set on the capsfilter that configures the
   * source */
  src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
  caps = gst_pad_get_caps_reffed (src_caps_src_pad);
  gst_object_unref (src_caps_src_pad);
  GST_DEBUG_OBJECT (self, "src-filter caps changed to %s",
      gst_caps_to_string (caps));

  if (gst_caps_get_size (caps)) {
    in_st = gst_caps_get_structure (caps, 0);
    if (in_st) {
      gst_structure_get_int (in_st, "width", &bcamsrc->width);
      gst_structure_get_int (in_st, "height", &bcamsrc->height);

      GST_DEBUG_OBJECT (self, "Source dimensions now: %dx%d", bcamsrc->width,
          bcamsrc->height);
    }
  }

  /* Update zoom */
  gst_base_camera_src_setup_zoom (bcamsrc);

  /* Update post-zoom capsfilter */
  if (self->src_zoom_filter)
    g_object_set (G_OBJECT (self->src_zoom_filter), "caps", caps, NULL);

  /* drop our ref on the caps */
  gst_caps_unref (caps);
}
Code Example #11
File: gst-nle-source.c  Project: fluendo/VAS
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }
  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
Code Example #12
/* Fetches a compatible pad on the target element which isn't already
 * linked */
static GstPad *
get_compatible_unlinked_pad (GstElement * element, GstPad * pad)
{
  GstPad *res = NULL;
  GstIterator *pads;
  gboolean done = FALSE;
  GstCaps *srccaps;

  if (G_UNLIKELY (pad == NULL))
    goto no_pad;

  GST_DEBUG ("element : %s, pad %s:%s",
      GST_ELEMENT_NAME (element), GST_DEBUG_PAD_NAME (pad));

  if (GST_PAD_DIRECTION (pad) == GST_PAD_SRC)
    pads = gst_element_iterate_sink_pads (element);
  else
    pads = gst_element_iterate_src_pads (element);
  srccaps = gst_pad_get_caps_reffed (pad);

  GST_DEBUG ("srccaps %" GST_PTR_FORMAT, srccaps);

  while (!done) {
    gpointer padptr;

    switch (gst_iterator_next (pads, &padptr)) {
      case GST_ITERATOR_OK:
      {
        GstPad *testpad = (GstPad *) padptr;

        if (gst_pad_is_linked (testpad)) {
          gst_object_unref (testpad);
        } else {
          GstCaps *sinkcaps = gst_pad_get_caps_reffed (testpad);

          GST_DEBUG ("sinkccaps %" GST_PTR_FORMAT, sinkcaps);

          if (gst_caps_can_intersect (srccaps, sinkcaps)) {
            res = testpad;
            done = TRUE;
          } else
            gst_object_unref (testpad);
          gst_caps_unref (sinkcaps);
        }
      }
        break;
      case GST_ITERATOR_DONE:
      case GST_ITERATOR_ERROR:
        done = TRUE;
        break;
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (pads);
        break;
    }
  }
  gst_iterator_free (pads);
  gst_caps_unref (srccaps);

  return res;

no_pad:
  {
    GST_ERROR ("No pad to check against");
    return NULL;
  }
}
Code Example #13
File: pulsesrc.c  Project: spunktsch/svtplayer
/* This is essentially gst_base_src_negotiate_default() but the caps
 * are guaranteed to have a channel layout for > 2 channels
 */
static gboolean
gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
{
  GstCaps *thiscaps;
  GstCaps *caps = NULL;
  GstCaps *peercaps = NULL;
  gboolean result = FALSE;

  /* first see what is possible on our source pad */
  thiscaps = gst_pad_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
  /* nothing or anything is allowed, we're done */
  if (thiscaps == NULL || gst_caps_is_any (thiscaps))
    goto no_nego_needed;

  /* get the peer caps */
  peercaps = gst_pad_peer_get_caps_reffed (GST_BASE_SRC_PAD (basesrc));
  GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
  if (peercaps) {
    /* get intersection */
    caps = gst_caps_intersect (thiscaps, peercaps);
    GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
    gst_caps_unref (thiscaps);
    gst_caps_unref (peercaps);
  } else {
    /* no peer, work with our own caps then */
    caps = thiscaps;
  }
  if (caps) {
    /* take first (and best, since they are sorted) possibility */
    caps = gst_caps_make_writable (caps);
    gst_caps_truncate (caps);

    /* now fixate */
    if (!gst_caps_is_empty (caps)) {
      gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
      GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);

      if (gst_caps_is_any (caps)) {
        /* hmm, still anything, so element can do anything and
         * nego is not needed */
        result = TRUE;
      } else if (gst_caps_is_fixed (caps)) {
        /* yay, fixed caps, use those then */
        result = gst_pulsesrc_create_stream (GST_PULSESRC_CAST (basesrc), caps);
        if (result)
          result = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
      }
    }
    gst_caps_unref (caps);
  }
  return result;

no_nego_needed:
  {
    GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
    if (thiscaps)
      gst_caps_unref (thiscaps);
    return TRUE;
  }
}
Code Example #14
static void
uridecodebin_pad_added_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  PrivateStream *ps;
  GstPad *sinkpad = NULL;
  GstCaps *caps;
  static GstCaps *subs_caps = NULL;

  if (!subs_caps) {
    subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
        "subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
        "application/x-ssa; application/x-ass; subtitle/x-kate; "
        "video/x-dvd-subpicture; ");
  }

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  ps = g_slice_new0 (PrivateStream);

  ps->dc = dc;
  ps->pad = pad;
  ps->queue = gst_element_factory_make ("queue", NULL);
  ps->sink = gst_element_factory_make ("fakesink", NULL);

  if (G_UNLIKELY (ps->queue == NULL || ps->sink == NULL))
    goto error;

  g_object_set (ps->sink, "silent", TRUE, NULL);
  g_object_set (ps->queue, "max-size-buffers", 1, "silent", TRUE, NULL);

  caps = gst_pad_get_caps_reffed (pad);

  if (gst_caps_can_intersect (caps, subs_caps)) {
    /* Subtitle streams are sparse and don't provide any information - don't
     * wait for data to preroll */
    g_object_set (ps->sink, "async", FALSE, NULL);
  }

  gst_caps_unref (caps);

  gst_bin_add_many (dc->priv->pipeline, ps->queue, ps->sink, NULL);

  if (!gst_element_link_pads_full (ps->queue, "src", ps->sink, "sink",
          GST_PAD_LINK_CHECK_NOTHING))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->sink))
    goto error;
  if (!gst_element_sync_state_with_parent (ps->queue))
    goto error;

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  if (sinkpad == NULL)
    goto error;
  if (gst_pad_link_full (pad, sinkpad,
          GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)
    goto error;
  gst_object_unref (sinkpad);

  /* Add an event probe */
  gst_pad_add_event_probe (pad, G_CALLBACK (_event_probe), ps);

  DISCO_LOCK (dc);
  dc->priv->streams = g_list_append (dc->priv->streams, ps);
  DISCO_UNLOCK (dc);

  GST_DEBUG_OBJECT (dc, "Done handling pad");

  return;

error:
  GST_ERROR_OBJECT (dc, "Error while handling pad");
  if (sinkpad)
    gst_object_unref (sinkpad);
  if (ps->queue)
    gst_object_unref (ps->queue);
  if (ps->sink)
    gst_object_unref (ps->sink);
  g_slice_free (PrivateStream, ps);
  return;
}
Code Example #15
File: gstcamerabin2.c  Project: PeterXu/gst-mobile
static void
gst_camera_bin_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstCameraBin *camera = GST_CAMERA_BIN_CAST (object);

  switch (prop_id) {
    case PROP_MODE:
      g_value_set_enum (value, camera->mode);
      break;
    case PROP_LOCATION:
      if (camera->mode == MODE_VIDEO) {
        g_value_set_string (value, camera->video_location);
      } else {
        g_value_set_string (value, camera->image_location);
      }
      break;
    case PROP_CAMERA_SRC:
      g_value_set_object (value, camera->src);
      break;
    case PROP_VIDEO_CAPTURE_SUPPORTED_CAPS:
    case PROP_IMAGE_CAPTURE_SUPPORTED_CAPS:{
      GstPad *pad;
      GstCaps *caps;
      const gchar *padname;

      if (prop_id == PROP_VIDEO_CAPTURE_SUPPORTED_CAPS) {
        padname = GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME;
      } else {
        padname = GST_BASE_CAMERA_SRC_IMAGE_PAD_NAME;
      }

      if (camera->src) {
        pad = gst_element_get_static_pad (camera->src, padname);

        g_assert (pad != NULL);

        /* TODO not sure if we want get_caps or get_allowed_caps to already
         * consider the full pipeline scenario and avoid picking a caps that
         * won't negotiate. Need to take care on the special case of the
         * pad being unlinked.
         */
        caps = gst_pad_get_caps_reffed (pad);
        if (caps) {
          gst_value_set_caps (value, caps);
          gst_caps_unref (caps);
        }

        gst_object_unref (pad);
      } else {
        GST_DEBUG_OBJECT (camera, "Camera source not created, can't get "
            "supported caps");
      }
    }
      break;
    case PROP_IMAGE_CAPTURE_CAPS:{
      GstCaps *caps = NULL;
      g_object_get (camera->imagebin_capsfilter, "caps", &caps, NULL);
      gst_value_set_caps (value, caps);
      gst_caps_unref (caps);
    }
      break;
    case PROP_VIDEO_CAPTURE_CAPS:{
      GstCaps *caps = NULL;
      g_object_get (camera->videobin_capsfilter, "caps", &caps, NULL);
      gst_value_set_caps (value, caps);
      gst_caps_unref (caps);
    }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Code Example #16
static void
pad_blocked_cb (GstPad * pad, gboolean blocked, GstPlaySinkVideoConvert * self)
{
    GstPad *peer;
    GstCaps *caps;
    gboolean raw;

    GST_PLAY_SINK_VIDEO_CONVERT_LOCK (self);
    self->sink_proxypad_blocked = blocked;
    GST_DEBUG_OBJECT (self, "Pad blocked: %d", blocked);
    if (!blocked)
        goto done;

    /* There must be a peer at this point */
    peer = gst_pad_get_peer (self->sinkpad);
    caps = gst_pad_get_negotiated_caps (peer);
    if (!caps)
        caps = gst_pad_get_caps_reffed (peer);
    gst_object_unref (peer);

    raw = is_raw_caps (caps);
    GST_DEBUG_OBJECT (self, "Caps %" GST_PTR_FORMAT " are raw: %d", caps, raw);
    gst_caps_unref (caps);

    if (raw == self->raw)
        goto unblock;
    self->raw = raw;

    if (raw) {
        GstBin *bin = GST_BIN_CAST (self);
        GstElement *head = NULL, *prev = NULL;
        GstPad *pad;

        GST_DEBUG_OBJECT (self, "Creating raw conversion pipeline");

        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

        self->conv = gst_element_factory_make ("ffmpegcolorspace", "conv");
        if (self->conv == NULL) {
            post_missing_element_message (self, "ffmpegcolorspace");
            GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
                                 (_("Missing element '%s' - check your GStreamer installation."),
                                  "ffmpegcolorspace"), ("video rendering might fail"));
        } else {
            gst_bin_add (bin, self->conv);
            gst_element_sync_state_with_parent (self->conv);
            distribute_running_time (self->conv, &self->segment);
            prev = head = self->conv;
        }

        self->scale = gst_element_factory_make ("videoscale", "scale");
        if (self->scale == NULL) {
            post_missing_element_message (self, "videoscale");
            GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
                                 (_("Missing element '%s' - check your GStreamer installation."),
                                  "videoscale"), ("possibly a liboil version mismatch?"));
        } else {
            /* Add black borders if necessary to keep the DAR */
            g_object_set (self->scale, "add-borders", TRUE, NULL);
            gst_bin_add (bin, self->scale);
            gst_element_sync_state_with_parent (self->scale);
            distribute_running_time (self->scale, &self->segment);
            if (prev) {
                if (!gst_element_link_pads_full (prev, "src", self->scale, "sink",
                                                 GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
                    goto link_failed;
            } else {
                head = self->scale;
            }
            prev = self->scale;
        }

        if (head) {
            pad = gst_element_get_static_pad (head, "sink");
            gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), pad);
            gst_object_unref (pad);
        }

        if (prev) {
            pad = gst_element_get_static_pad (prev, "src");
            gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), pad);
            gst_object_unref (pad);
        }

        if (!head && !prev) {
            gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
                                      self->sink_proxypad);
        }

        GST_DEBUG_OBJECT (self, "Raw conversion pipeline created");
    } else {
        GstBin *bin = GST_BIN_CAST (self);

        GST_DEBUG_OBJECT (self, "Removing raw conversion pipeline");

        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

        if (self->conv) {
            gst_element_set_state (self->conv, GST_STATE_NULL);
            gst_bin_remove (bin, self->conv);
            self->conv = NULL;
        }
        if (self->scale) {
            gst_element_set_state (self->scale, GST_STATE_NULL);
            gst_bin_remove (bin, self->scale);
            self->scale = NULL;
        }

        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
                                  self->sink_proxypad);

        GST_DEBUG_OBJECT (self, "Raw conversion pipeline removed");
    }

unblock:
    gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
                                    (GstPadBlockCallback) pad_blocked_cb, gst_object_ref (self),
                                    (GDestroyNotify) gst_object_unref);

done:
    GST_PLAY_SINK_VIDEO_CONVERT_UNLOCK (self);
    return;

link_failed:
    {
        GST_ELEMENT_ERROR (self, CORE, PAD,
                           (NULL), ("Failed to configure the video converter."));
        gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
                                  self->sink_proxypad);
        gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
                                        (GstPadBlockCallback) pad_blocked_cb, gst_object_ref (self),
                                        (GDestroyNotify) gst_object_unref);
        return;
    }
}
Code Example #17
static void
pad_added_cb (GstElement * timeline, GstPad * pad, GESTimelinePipeline * self)
{
  OutputChain *chain;
  GESTrack *track;
  GstPad *sinkpad;
  gboolean reconfigured = FALSE;

  GST_DEBUG_OBJECT (self, "new pad %s:%s , caps:%" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), GST_PAD_CAPS (pad));

  if (G_UNLIKELY (!(track =
              ges_timeline_get_track_for_pad (self->priv->timeline, pad)))) {
    GST_WARNING_OBJECT (self, "Couldn't find coresponding track !");
    return;
  }

  /* Don't connect track if it's not going to be used */
  if (track->type == GES_TRACK_TYPE_VIDEO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_VIDEO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Video track... but we don't need it. Not linking");
  }
  if (track->type == GES_TRACK_TYPE_AUDIO &&
      !(self->priv->mode & TIMELINE_MODE_PREVIEW_AUDIO) &&
      !(self->priv->mode & TIMELINE_MODE_RENDER) &&
      !(self->priv->mode & TIMELINE_MODE_SMART_RENDER)) {
    GST_DEBUG_OBJECT (self, "Audio track... but we don't need it. Not linking");
  }

  /* Get an existing chain or create it */
  if (!(chain = get_output_chain_for_track (self, track)))
    chain = new_output_chain_for_track (self, track);
  chain->srcpad = pad;

  /* Adding tee */
  chain->tee = gst_element_factory_make ("tee", NULL);
  gst_bin_add (GST_BIN_CAST (self), chain->tee);
  gst_element_sync_state_with_parent (chain->tee);

  /* Linking pad to tee */
  sinkpad = gst_element_get_static_pad (chain->tee, "sink");
  gst_pad_link_full (pad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
  gst_object_unref (sinkpad);

  /* Connect playsink */
  if (self->priv->mode & TIMELINE_MODE_PREVIEW) {
    const gchar *sinkpad_name;
    GstPad *tmppad;

    GST_DEBUG_OBJECT (self, "Connecting to playsink");

    switch (track->type) {
      case GES_TRACK_TYPE_VIDEO:
        sinkpad_name = "video_sink";
        break;
      case GES_TRACK_TYPE_AUDIO:
        sinkpad_name = "audio_sink";
        break;
      case GES_TRACK_TYPE_TEXT:
        sinkpad_name = "text_sink";
        break;
      default:
        GST_WARNING_OBJECT (self, "Can't handle tracks of type %d yet",
            track->type);
        goto error;
    }

    /* Request a sinkpad from playsink */
    if (G_UNLIKELY (!(sinkpad =
                gst_element_get_request_pad (self->priv->playsink,
                    sinkpad_name)))) {
      GST_ERROR_OBJECT (self, "Couldn't get a pad from the playsink !");
      goto error;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src%d");
    if (G_UNLIKELY (gst_pad_link_full (tmppad, sinkpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_ERROR_OBJECT (self, "Couldn't link track pad to playsink");
      gst_object_unref (tmppad);
      goto error;
    }
    gst_object_unref (tmppad);

    GST_DEBUG ("Reconfiguring playsink");

    /* reconfigure playsink */
    g_signal_emit_by_name (self->priv->playsink, "reconfigure", &reconfigured);
    GST_DEBUG ("'reconfigure' returned %d", reconfigured);

    /* We still hold a reference on the sinkpad */
    chain->playsinkpad = sinkpad;
  }

  /* Connect to encodebin */
  if (self->priv->mode & (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER)) {
    GstPad *tmppad;
    GST_DEBUG_OBJECT (self, "Connecting to encodebin");

    if (!chain->encodebinpad) {
      /* Check for unused static pads */
      sinkpad = get_compatible_unlinked_pad (self->priv->encodebin, pad);

      if (sinkpad == NULL) {
        GstCaps *caps = gst_pad_get_caps_reffed (pad);
        /* If no compatible static pad is available, request a pad */
        g_signal_emit_by_name (self->priv->encodebin, "request-pad", caps,
            &sinkpad);
        gst_caps_unref (caps);
        if (G_UNLIKELY (sinkpad == NULL)) {
          GST_ERROR_OBJECT (self, "Couldn't get a pad from encodebin !");
          goto error;
        }
      }
      chain->encodebinpad = sinkpad;
    }

    tmppad = gst_element_get_request_pad (chain->tee, "src%d");
    if (G_UNLIKELY (gst_pad_link_full (tmppad,
                chain->encodebinpad,
                GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
      GST_WARNING_OBJECT (self, "Couldn't link track pad to playsink");
      goto error;
    }
    gst_object_unref (tmppad);

  }

  /* If chain wasn't already present, insert it in list */
  if (!get_output_chain_for_track (self, track))
    self->priv->chains = g_list_append (self->priv->chains, chain);

  GST_DEBUG ("done");
  return;

error:
  {
    if (chain->tee) {
      gst_bin_remove (GST_BIN_CAST (self), chain->tee);
    }
    if (sinkpad)
      gst_object_unref (sinkpad);
    g_free (chain);
  }
}