Example #1
int
main (int argc, char *argv[])
{
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    GstMapInfo map;

    buf = gst_buffer_new_and_alloc (100);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    memset (map.data, i, 100);
    gst_buffer_unmap (buf, &map);

    printf ("%d: pushing buffer for pointer %p, %p\n", i, map.data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  /* push EOS */
  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  /* _is_eos() does not block; it returns TRUE once EOS has been received
   * and there are no more samples left to pull */
  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstSample *sample;

    /* pull the next item, this can return NULL when there is no more data and
     * EOS has been received */
    sample = gst_app_sink_pull_sample (GST_APP_SINK (app->sink));
    printf ("retrieved sample %p\n", sample);
    if (sample)
      gst_sample_unref (sample);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
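A hedged aside, not part of the original example: gst_app_src_push_buffer() takes ownership of the buffer and reports a GstFlowReturn that the loop above ignores; a minimal check could look like this.

    GstFlowReturn ret;

    ret = gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
    if (ret != GST_FLOW_OK) {
      /* GST_FLOW_FLUSHING: the pipeline is not in PAUSED/PLAYING;
       * GST_FLOW_EOS: end-of-stream was already pushed */
      g_printerr ("push-buffer failed: %s\n", gst_flow_get_name (ret));
    }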
Example #2
int
main (int argc, char *argv[])
{
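  /* Note: this variant targets the GStreamer 0.10 app library
   * (gst_app_buffer_new, gst_app_sink_pull_buffer); GStreamer 1.0 replaced
   * these with gst_buffer_new_wrapped() and gst_app_sink_pull_sample(). */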
  App *app = &s_app;
  int i;

  gst_init (&argc, &argv);

  app->pipe = gst_pipeline_new (NULL);
  g_assert (app->pipe);

  app->src = gst_element_factory_make ("appsrc", NULL);
  g_assert (app->src);
  gst_bin_add (GST_BIN (app->pipe), app->src);

  app->id = gst_element_factory_make ("identity", NULL);
  g_assert (app->id);
  gst_bin_add (GST_BIN (app->pipe), app->id);

  app->sink = gst_element_factory_make ("appsink", NULL);
  g_assert (app->sink);
  gst_bin_add (GST_BIN (app->pipe), app->sink);

  gst_element_link (app->src, app->id);
  gst_element_link (app->id, app->sink);

  gst_element_set_state (app->pipe, GST_STATE_PLAYING);

  for (i = 0; i < 10; i++) {
    GstBuffer *buf;
    void *data;

    data = malloc (100);
    memset (data, i, 100);

    buf = gst_app_buffer_new (data, 100, dont_eat_my_chicken_wings, data);
    printf ("%d: creating buffer for pointer %p, %p\n", i, data, buf);
    gst_app_src_push_buffer (GST_APP_SRC (app->src), buf);
  }

  gst_app_src_end_of_stream (GST_APP_SRC (app->src));

  while (!gst_app_sink_is_eos (GST_APP_SINK (app->sink))) {
    GstBuffer *buf;

    buf = gst_app_sink_pull_buffer (GST_APP_SINK (app->sink));
    printf ("retrieved buffer %p\n", buf);
    gst_buffer_unref (buf);
  }
  gst_element_set_state (app->pipe, GST_STATE_NULL);

  return 0;
}
Example #3
extern int
feed_buffer_to_gst (const char *audio, size_t b_len, GNUNET_gstData * d)
{
  GstBuffer *b;
  gchar *bufspace;
  GstFlowReturn flow;

  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
	      "Feeding %u bytes to GStreamer\n",
	      (unsigned int) b_len);

  bufspace = g_memdup (audio, b_len);
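  /* note: g_memdup() was deprecated in GLib 2.68 in favour of g_memdup2(),
   * which takes a gsize length and avoids integer overflow on large inputs */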
  b = gst_buffer_new_wrapped (bufspace, b_len);
  if (NULL == b)
  {
    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
		"Failed to wrap a buffer\n");
    g_free (bufspace);
    return GNUNET_SYSERR;
  }
  if (GST_APP_SRC(d->appsrc) == NULL)
    exit(10);
  flow = gst_app_src_push_buffer (GST_APP_SRC(d->appsrc), b);
  /* They all return GNUNET_OK because the player currently stops when
   * data stops coming; this might need to change so that the player
   * also stops when the pipeline breaks.
   */
  switch (flow)
  {
  case GST_FLOW_OK:
    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
		"Fed %u bytes to the pipeline\n",
		(unsigned int) b_len);
    break;
  case GST_FLOW_FLUSHING:
    /* buffer was dropped, because pipeline state is not PAUSED or PLAYING */
    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
		"Dropped a buffer\n");
    break;
  case GST_FLOW_EOS:
    /* end of stream */
    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
		"EOS\n");
    break;
  default:
    GNUNET_log (GNUNET_ERROR_TYPE_WARNING,
		"Unexpected push result\n");
    break;
  }
  return GNUNET_OK;
}
Example #4
static void
gst_app_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC (object);

  switch (prop_id) {
    case PROP_CAPS:
    {
      GstCaps *caps;

      /* we're missing a _take_caps() function to transfer ownership */
      caps = gst_app_src_get_caps (appsrc);
      gst_value_set_caps (value, caps);
      if (caps)
        gst_caps_unref (caps);
      break;
    }
    case PROP_SIZE:
      g_value_set_int64 (value, gst_app_src_get_size (appsrc));
      break;
    case PROP_STREAM_TYPE:
      g_value_set_enum (value, gst_app_src_get_stream_type (appsrc));
      break;
    case PROP_MAX_BYTES:
      g_value_set_uint64 (value, gst_app_src_get_max_bytes (appsrc));
      break;
    case PROP_FORMAT:
      g_value_set_enum (value, appsrc->priv->format);
      break;
    case PROP_BLOCK:
      g_value_set_boolean (value, appsrc->priv->block);
      break;
    case PROP_IS_LIVE:
      g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (appsrc)));
      break;
    case PROP_MIN_LATENCY:
    {
      guint64 min;

      gst_app_src_get_latency (appsrc, &min, NULL);
      g_value_set_int64 (value, min);
      break;
    }
    case PROP_MAX_LATENCY:
    {
      guint64 max;

      gst_app_src_get_latency (appsrc, &max, NULL);
      g_value_set_int64 (value, max);
      break;
    }
    case PROP_EMIT_SIGNALS:
      g_value_set_boolean (value, gst_app_src_get_emit_signals (appsrc));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
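For context, a short usage sketch (assumed, not from the original file): the properties served by this getter are read back through plain g_object_get() calls.

  guint64 max_bytes;
  gboolean block;

  g_object_get (G_OBJECT (appsrc), "max-bytes", &max_bytes,
      "block", &block, NULL);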
Example #5
void GStreamerReader::PlayBinSourceSetup(GstAppSrc* aSource)
{
  mSource = GST_APP_SRC(aSource);
  gst_app_src_set_callbacks(mSource, &mSrcCallbacks, (gpointer) this, nullptr);
  MediaResource* resource = mDecoder->GetResource();

  /* do a short read to trigger a network request so that GetLength() below
   * returns something meaningful and not -1
   */
  char buf[512];
  unsigned int size = 0;
  resource->Read(buf, sizeof(buf), &size);
  resource->Seek(SEEK_SET, 0);

  /* now we should have a length */
  int64_t resourceLength = resource->GetLength();
  gst_app_src_set_size(mSource, resourceLength);
  if (resource->IsDataCachedToEndOfResource(0) ||
      (resourceLength != -1 && resourceLength <= SHORT_FILE_SIZE)) {
    /* let the demuxer work in pull mode for local files (or very short files)
     * so that we get optimal seeking accuracy/performance
     */
    LOG(PR_LOG_DEBUG, ("configuring random access, len %lld", resourceLength));
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_RANDOM_ACCESS);
  } else {
    /* make the demuxer work in push mode so that seeking is kept to a minimum
     */
    LOG(PR_LOG_DEBUG, ("configuring push mode, len %lld", resourceLength));
    gst_app_src_set_stream_type(mSource, GST_APP_STREAM_TYPE_SEEKABLE);
  }
}
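A hedged sketch of the seek-data callback that random-access mode requires; the name SeekDataCb is illustrative, the real handler is whatever mSrcCallbacks carries.

static gboolean
SeekDataCb (GstAppSrc *aSrc, guint64 aOffset, gpointer aUserData)
{
  /* reposition the backing resource at aOffset; returning FALSE fails the seek */
  return TRUE;
}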
Example #6
void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueueing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), rsample->presentationTime().toFloat(), rsample->presentationSize().width(), rsample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
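As the comment above notes, gst_app_src_push_sample() is transfer-none while gst_app_src_push_buffer() is transfer-full; a minimal sketch of the two ownership rules (names illustrative):

    GstFlowReturn ret;

    ret = gst_app_src_push_sample (appsrc, sample);  /* transfer none: caller keeps its ref */
    gst_sample_unref (sample);

    ret = gst_app_src_push_buffer (appsrc, buffer);  /* transfer full: buffer must not be touched afterwards */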
Example #7
void PlaybackPipeline::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;
    GList *l;

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Have EOS");

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    bool allTracksConfigured = priv->allTracksConfigured;
    if (!allTracksConfigured) {
        priv->allTracksConfigured = true;
    }
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    if (!allTracksConfigured) {
        gst_element_no_more_pads(GST_ELEMENT(m_webKitMediaSrc.get()));
        webKitMediaSrcDoAsyncDone(m_webKitMediaSrc.get());
    }

    Vector<GstAppSrc*> appSrcs;

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    for (l = priv->streams; l; l = l->next) {
        Stream *stream = static_cast<Stream*>(l->data);
        if (stream->appsrc)
            appSrcs.append(GST_APP_SRC(stream->appsrc));
    }
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    for (Vector<GstAppSrc*>::iterator it = appSrcs.begin(); it != appSrcs.end(); ++it)
        gst_app_src_end_of_stream(*it);
}
Example #8
static gboolean
gst_app_src_query (GstBaseSrc * src, GstQuery * query)
{
  GstAppSrc *appsrc = GST_APP_SRC (src);
  gboolean res;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
    {
      GstClockTime min, max;
      gboolean live;

      /* Query the parent class for the defaults */
      res = gst_base_src_query_latency (src, &live, &min, &max);

      /* overwrite with our values when we need to */
      g_mutex_lock (appsrc->priv->mutex);
      if (appsrc->priv->min_latency != -1)
        min = appsrc->priv->min_latency;
      if (appsrc->priv->max_latency != -1)
        max = appsrc->priv->max_latency;
      g_mutex_unlock (appsrc->priv->mutex);

      gst_query_set_latency (query, live, min, max);
      break;
    }
    default:
      res = GST_BASE_SRC_CLASS (parent_class)->query (src, query);
      break;
  }

  return res;
}
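A one-line usage sketch (assumed, not from the original file): the min/max values this query handler reports are configured with gst_app_src_set_latency(), in nanoseconds, where -1 keeps the default.

  gst_app_src_set_latency (appsrc, 0, 20 * GST_MSECOND);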
Example #9
static GstCaps *
gst_app_src_internal_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstAppSrc *appsrc = GST_APP_SRC (bsrc);
  GstCaps *caps;

  GST_OBJECT_LOCK (appsrc);
  if ((caps = appsrc->priv->caps))
    gst_caps_ref (caps);
  GST_OBJECT_UNLOCK (appsrc);

  if (filter) {
    if (caps) {
      GstCaps *intersection =
          gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (caps);
      caps = intersection;
    } else {
      caps = gst_caps_ref (filter);
    }
  }

  GST_DEBUG_OBJECT (appsrc, "caps: %" GST_PTR_FORMAT, caps);
  return caps;
}
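A short usage sketch (not from the original file) of the setter side: the caps this function returns are typically installed with gst_app_src_set_caps().

  GstCaps *caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, "S16LE",
      "rate", G_TYPE_INT, 44100,
      "channels", G_TYPE_INT, 2, NULL);
  gst_app_src_set_caps (appsrc, caps);
  gst_caps_unref (caps);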
Example #10
static void
gst_app_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC (object);

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_src_set_caps (appsrc, gst_value_get_caps (value));
      break;
    case PROP_SIZE:
      gst_app_src_set_size (appsrc, g_value_get_int64 (value));
      break;
    case PROP_STREAM_TYPE:
      gst_app_src_set_stream_type (appsrc, g_value_get_enum (value));
      break;
    case PROP_MAX_BYTES:
      gst_app_src_set_max_bytes (appsrc, g_value_get_uint64 (value));
      break;
    case PROP_FORMAT:
      appsrc->format = g_value_get_enum (value);
      break;
    case PROP_BLOCK:
      appsrc->block = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
Example #11
static void
symmetry_test_setup (SymmetryTest * st, GstElement * sink, GstElement * src)
{
  GstCaps *caps;
  st->sink = sink;
  g_object_set (sink, "sync", FALSE, NULL);
  st->src = src;

  st->sink_pipeline = GST_PIPELINE (gst_pipeline_new (NULL));
  st->src_pipeline = GST_PIPELINE (gst_pipeline_new (NULL));

  st->sink_src = GST_APP_SRC (gst_element_factory_make ("appsrc", NULL));
  fail_unless (st->sink_src != NULL);
  caps = gst_caps_from_string ("application/x-gst-check");
  gst_app_src_set_caps (st->sink_src, caps);
  gst_caps_unref (caps);

  gst_bin_add_many (GST_BIN (st->sink_pipeline), GST_ELEMENT (st->sink_src),
      st->sink, NULL);
  fail_unless (gst_element_link_many (GST_ELEMENT (st->sink_src), st->sink,
          NULL));

  st->src_sink = GST_APP_SINK (gst_element_factory_make ("appsink", NULL));
  fail_unless (st->src_sink != NULL);
  gst_bin_add_many (GST_BIN (st->src_pipeline), st->src,
      GST_ELEMENT (st->src_sink), NULL);
  fail_unless (gst_element_link_many (st->src, GST_ELEMENT (st->src_sink),
          NULL));

  gst_element_set_state (GST_ELEMENT (st->sink_pipeline), GST_STATE_PLAYING);
  gst_element_set_state (GST_ELEMENT (st->src_pipeline), GST_STATE_PLAYING);
}
Example #12
GstElement *VideoHttpBuffer::setupSrcElement(GstElement *pipeline)
{
    Q_ASSERT(!m_element && !m_pipeline);
    if (m_element || m_pipeline)
    {
        if (m_pipeline == pipeline)
            return GST_ELEMENT(m_element);
        return 0;
    }

    m_element = GST_APP_SRC(gst_element_factory_make("appsrc", "source"));
    if (!m_element)
        return 0;

    g_object_ref(m_element);

    m_pipeline = pipeline;

    gst_app_src_set_max_bytes(m_element, 0);
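    // a max-bytes of 0 disables the appsrc internal queue limit (0 = unlimited)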
    gst_app_src_set_stream_type(m_element, GST_APP_STREAM_TYPE_RANDOM_ACCESS);

    GstAppSrcCallbacks callbacks;
    memset(&callbacks, 0, sizeof(callbacks));
    callbacks.need_data = needDataWrap;
    callbacks.seek_data = (gboolean (*)(GstAppSrc*,guint64,void*))seekDataWrap;
    gst_app_src_set_callbacks(m_element, &callbacks, this, 0);

    if (media && media->fileSize())
        gst_app_src_set_size(m_element, media->fileSize());

    gst_bin_add(GST_BIN(m_pipeline), GST_ELEMENT(m_element));
    return GST_ELEMENT(m_element);
}
Example #13
/* will be called in push mode */
static gboolean
gst_app_src_do_seek (GstBaseSrc * src, GstSegment * segment)
{
  GstAppSrc *appsrc = GST_APP_SRC (src);
  gint64 desired_position;
  gboolean res = FALSE;

  desired_position = segment->last_stop;

  GST_DEBUG_OBJECT (appsrc, "seeking to %" G_GINT64_FORMAT ", format %s",
      desired_position, gst_format_get_name (segment->format));

  /* no need to try to seek in streaming mode */
  if (appsrc->stream_type == GST_APP_STREAM_TYPE_STREAM)
    return TRUE;

  g_signal_emit (appsrc, gst_app_src_signals[SIGNAL_SEEK_DATA], 0,
      desired_position, &res);

  if (res) {
    GST_DEBUG_OBJECT (appsrc, "flushing queue");
    gst_app_src_flush_queued (appsrc);
  } else {
    GST_WARNING_OBJECT (appsrc, "seek failed");
  }

  return res;
}
Example #14
void PlaybackPipeline::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Have EOS");

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    bool allTracksConfigured = priv->allTracksConfigured;
    if (!allTracksConfigured)
        priv->allTracksConfigured = true;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    if (!allTracksConfigured) {
        gst_element_no_more_pads(GST_ELEMENT(m_webKitMediaSrc.get()));
        webKitMediaSrcDoAsyncDone(m_webKitMediaSrc.get());
    }

    Vector<GstAppSrc*> appsrcs;

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    for (Stream* stream : priv->streams) {
        if (stream->appsrc)
            appsrcs.append(GST_APP_SRC(stream->appsrc));
    }
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    for (GstAppSrc* appsrc : appsrcs)
        gst_app_src_end_of_stream(appsrc);
}
Example #15
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{

    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");

    __BEGIN__;
    if (input_pix_fmt == 1) {
        if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
        }
    }
    else if (input_pix_fmt == 0) {
        if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
        }
    }
    else {
        assert(false);
    }
    int size;
    size = image->imageSize;
    buffer = gst_buffer_new_and_alloc (size);
    //gst_buffer_set_data (buffer,(guint8*)image->imageData, size);
    memcpy (GST_BUFFER_DATA(buffer),image->imageData, size);
    gst_app_src_push_buffer(GST_APP_SRC(source),buffer);
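    // gst_app_src_push_buffer() takes ownership of the buffer, which is why
    // the unref below remains commented out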
    //gst_buffer_unref(buffer);
    //buffer = 0;
    __END__;
    return true;
}
Example #16
void QGstAppSrc::pushDataToAppSrc()
{
    if (!isStreamValid() || !m_setup)
        return;

    if (m_dataRequested && !m_enoughData) {
        qint64 size;
        if (m_dataRequestSize == (unsigned int)-1)
            size = qMin(m_stream->bytesAvailable(), queueSize());
        else
            size = qMin(m_stream->bytesAvailable(), (qint64)m_dataRequestSize);
        void *data = g_malloc(size);
        GstBuffer* buffer = gst_app_buffer_new(data, size, g_free, data);
        buffer->offset = m_stream->pos();
        qint64 bytesRead = m_stream->read((char*)GST_BUFFER_DATA(buffer), size);
        buffer->offset_end =  buffer->offset + bytesRead - 1;

        if (bytesRead > 0) {
            m_dataRequested = false;
            m_enoughData = false;
            GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (element()), buffer);
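            /* GST_FLOW_WRONG_STATE and GST_FLOW_RESEND are GStreamer 0.10
             * codes; 1.0 renamed WRONG_STATE to GST_FLOW_FLUSHING and
             * dropped RESEND */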
            if (ret == GST_FLOW_ERROR) {
                qWarning()<<"appsrc: push buffer error";
            } else if (ret == GST_FLOW_WRONG_STATE) {
                qWarning()<<"appsrc: push buffer wrong state";
            } else if (ret == GST_FLOW_RESEND) {
                qWarning()<<"appsrc: push buffer resend";
            }
        }
    } else if (m_stream->atEnd()) {
        sendEOS();
    }
}
Example #17
static gchar *
gst_app_src_uri_get_uri (GstURIHandler * handler)
{
  GstAppSrc *appsrc = GST_APP_SRC (handler);

  return appsrc->priv->uri ? g_strdup (appsrc->priv->uri) : NULL;
}
Example #18
/*!
 * \brief CvVideoWriter_GStreamer::close
 * ends the pipeline by sending EOS and destroys the pipeline and all
 * elements afterwards
 */
void CvVideoWriter_GStreamer::close()
{
    if (pipeline)
    {
        gst_app_src_end_of_stream(GST_APP_SRC(source));

        //wait for EOS to trickle down the pipeline. This will let all elements finish properly
        GstBus* bus = gst_element_get_bus(pipeline);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if(msg != NULL){
            gst_message_unref(msg);
            g_object_unref(G_OBJECT(bus));
        }

        gst_element_set_state (pipeline, GST_STATE_NULL);
        handleMessage(pipeline);

        gst_object_unref (GST_OBJECT (pipeline));

        if (source)
          gst_object_unref (GST_OBJECT (source));

        if (file)
          gst_object_unref (GST_OBJECT (file));
    }
}
Example #19
static gboolean create_pipeline(SpiceGstDecoder *decoder)
{
    gchar *desc;
    gboolean auto_enabled;
    guint opt;
    GstAppSinkCallbacks appsink_cbs = { NULL };
    GError *err = NULL;
    GstBus *bus;

    auto_enabled = (g_getenv("SPICE_GSTVIDEO_AUTO") != NULL);
    if (auto_enabled || !VALID_VIDEO_CODEC_TYPE(decoder->base.codec_type)) {
        SPICE_DEBUG("Trying %s for codec type %d %s",
                    gst_opts[0].dec_name, decoder->base.codec_type,
                    (auto_enabled) ? "(SPICE_GSTVIDEO_AUTO is set)" : "");
        opt = 0;
    } else {
        opt = decoder->base.codec_type;
    }

    /* - We schedule the frame display ourselves so set sync=false on appsink
     *   so the pipeline decodes them as fast as possible. This will also
     *   minimize the risk of frames getting lost when we rebuild the
     *   pipeline.
     * - Set max-bytes=0 on appsrc so it does not drop frames that may be
     *   needed by those that follow.
     */
    desc = g_strdup_printf("appsrc name=src is-live=true format=time max-bytes=0 block=true "
                           "%s ! %s ! videoconvert ! appsink name=sink "
                           "caps=video/x-raw,format=BGRx sync=false drop=false",
                           gst_opts[opt].dec_caps, gst_opts[opt].dec_name);
    SPICE_DEBUG("GStreamer pipeline: %s", desc);

    decoder->pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &err);
    g_free(desc);
    if (!decoder->pipeline) {
        spice_warning("GStreamer error: %s", err->message);
        g_clear_error(&err);
        return FALSE;
    }

    decoder->appsrc = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "src"));
    decoder->appsink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "sink"));

    appsink_cbs.new_sample = new_sample;
    gst_app_sink_set_callbacks(decoder->appsink, &appsink_cbs, decoder, NULL);
    bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipeline));
    gst_bus_add_watch(bus, handle_pipeline_message, decoder);
    gst_object_unref(bus);

    decoder->clock = gst_pipeline_get_clock(GST_PIPELINE(decoder->pipeline));

    if (gst_element_set_state(decoder->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        SPICE_DEBUG("GStreamer error: Unable to set the pipeline to the playing state.");
        free_pipeline(decoder);
        return FALSE;
    }

    return TRUE;
}
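A minimal sketch of the new_sample callback wired into appsink_cbs above; the real implementation lives elsewhere in the decoder, so this only shows the expected shape.

static GstFlowReturn
new_sample (GstAppSink *appsink, gpointer user_data)
{
    GstSample *sample = gst_app_sink_pull_sample (appsink);
    if (sample) {
        /* hand the decoded frame to the scheduling/display path, then drop our ref */
        gst_sample_unref (sample);
    }
    return GST_FLOW_OK;
}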
Example #20
MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    if (priv->allTracksConfigured) {
        GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet");
        return MediaSourcePrivate::NotSupported;
    }

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d", (int)GST_STATE(m_webKitMediaSrc.get()));

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    guint numberOfStreams = g_list_length(priv->streams);
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    Stream* stream = g_new0(Stream, 1);
    GUniquePtr<gchar> srcName(g_strdup_printf("src%u", numberOfStreams));
    GUniquePtr<gchar> typefindName(g_strdup_printf("typefind%u", numberOfStreams));
    stream->parent = m_webKitMediaSrc.get();
    stream->appsrc = gst_element_factory_make("appsrc", srcName.get());
    stream->appSrcNeedDataFlag = false;
    stream->sourceBuffer = sourceBufferPrivate.get();

    // No track has been attached yet.
    stream->type = Invalid;
    stream->parser = nullptr;
    stream->caps = nullptr;
#if ENABLE(VIDEO_TRACK)
    stream->audioTrack = nullptr;
    stream->videoTrack = nullptr;
#endif
    stream->presentationSize = WebCore::FloatSize();

    gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &appsrcCallbacks, stream->parent, 0);
    gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE);
    gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE);

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    priv->streams = g_list_prepend(priv->streams, stream);
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc);
    gst_element_sync_state_with_parent(stream->appsrc);

    return MediaSourcePrivate::Ok;
}
Example #21
void CvVideoWriter_GStreamer::close()
{
    if (pipeline) {
        gst_app_src_end_of_stream(GST_APP_SRC(source));
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (GST_OBJECT (pipeline));
    }
}
Example #22
static gboolean
gst_app_src_do_get_size (GstBaseSrc * src, guint64 * size)
{
  GstAppSrc *appsrc = GST_APP_SRC (src);

  *size = gst_app_src_get_size (appsrc);

  return TRUE;
}
Example #23
void GStreamerReader::PlayBinSourceSetupCb(GstElement* aPlayBin,
                                           GParamSpec* pspec,
                                           gpointer aUserData)
{
  GstElement *source;
  GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);

  g_object_get(aPlayBin, "source", &source, nullptr);
  reader->PlayBinSourceSetup(GST_APP_SRC(source));
}
Example #24
MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    if (priv->allTracksConfigured) {
        GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet");
        return MediaSourcePrivate::NotSupported;
    }

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d", int(GST_STATE(m_webKitMediaSrc.get())));

    Stream* stream = new Stream{ };
    stream->parent = m_webKitMediaSrc.get();
    stream->appsrc = gst_element_factory_make("appsrc", nullptr);
    stream->appsrcNeedDataFlag = false;
    stream->sourceBuffer = sourceBufferPrivate.get();

    // No track has been attached yet.
    stream->type = Invalid;
    stream->parser = nullptr;
    stream->caps = nullptr;
    stream->audioTrack = nullptr;
    stream->videoTrack = nullptr;
    stream->presentationSize = WebCore::FloatSize();
    stream->lastEnqueuedTime = MediaTime::invalidTime();

    gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &enabledAppsrcCallbacks, stream->parent, nullptr);
    gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE);
    gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE);

    gst_app_src_set_max_bytes(GST_APP_SRC(stream->appsrc), 2 * WTF::MB);
    g_object_set(G_OBJECT(stream->appsrc), "block", FALSE, "min-percent", 20, nullptr);
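    // min-percent makes appsrc fire need-data once its internal queue drops
    // below that percentage of max-bytes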

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    priv->streams.prepend(stream);
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc);
    gst_element_sync_state_with_parent(stream->appsrc);

    return MediaSourcePrivate::Ok;
}
Example #25
static gboolean
gst_app_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
    GError ** error)
{
  GstAppSrc *appsrc = GST_APP_SRC (handler);

  g_free (appsrc->priv->uri);
  appsrc->priv->uri = uri ? g_strdup (uri) : NULL;

  return TRUE;
}
Example #26
static void
gst_app_src_finalize (GObject * obj)
{
  GstAppSrc *appsrc = GST_APP_SRC (obj);

  g_mutex_free (appsrc->priv->mutex);
  g_cond_free (appsrc->priv->cond);
  g_queue_free (appsrc->priv->queue);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
Example #27
Player::Backend::Backend(Player * player):
    pipeline_(),
    appsrc_(),
    conv_(),
    audiosink_(),
    loop_(),
    push_id_(),
    bus_watch_id_(),
    player_(player) {
    if(!gst_is_initialized()) {
        GError *err = NULL;
        if(!gst_init_check(nullptr,nullptr,&err)) {
            std::exit(err->code);
        }
    }

    pipeline_ = gst_pipeline_new ("pipeline");

    if(pipeline_==nullptr) {
        std::exit(EXIT_FAILURE);
    };

    build_gst_element(appsrc_,"appsrc","source");
    build_gst_element(conv_,"audioconvert","conv");

    GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline_));
    bus_watch_id_ = gst_bus_add_watch (bus, wrap_bus_callback, this);
    gst_object_unref (bus);
    
    GstCaps * caps = gst_caps_new_simple(
        "audio/x-raw",
        "format", G_TYPE_STRING, format_,
        "rate", G_TYPE_INT, Config::sample_rate,
        "channels",G_TYPE_INT, Config::channels,
        "signed", G_TYPE_BOOLEAN, TRUE,
        "layout", G_TYPE_STRING, "interleaved",
        nullptr);

    g_object_set(G_OBJECT(appsrc_),"caps",caps,nullptr);
    g_object_set(G_OBJECT(appsrc_),"is-live",true,nullptr);
    g_object_set(G_OBJECT(appsrc_),"min-latency",0,nullptr);
    g_object_set(G_OBJECT(appsrc_),"emit-signals",false,nullptr);
    g_object_set(G_OBJECT(appsrc_),"format",GST_FORMAT_TIME,nullptr);

    //the gstreamer main loop is the main event loop for audio generation
    loop_ = g_main_loop_new (nullptr, FALSE);

    gst_bin_add_many (GST_BIN (pipeline_), appsrc_, conv_, nullptr);
    gst_element_link (appsrc_, conv_);

    GstAppSrcCallbacks callbacks = {wrap_need_data, wrap_enough_data, wrap_seek_data};
    gst_app_src_set_callbacks(GST_APP_SRC(appsrc_), &callbacks, this, nullptr);
}
Example #28
void MediaPlayer::setupSource()
{
    // If we got QIODevice, we use appsrc
    if ( m_mediaIODevice )
    {
        m_gst_source = createElement("appsrc", "source");

        if ( m_mediaIODevice->isSequential() )
        {
            gst_app_src_set_size( GST_APP_SRC(m_gst_source), -1 );
            gst_app_src_set_stream_type( GST_APP_SRC(m_gst_source), GST_APP_STREAM_TYPE_STREAM );
        }
        else
        {
            gst_app_src_set_size( GST_APP_SRC(m_gst_source), m_mediaIODevice->size() );
            gst_app_src_set_stream_type( GST_APP_SRC(m_gst_source), GST_APP_STREAM_TYPE_SEEKABLE );
        }

        GstAppSrcCallbacks callbacks = { 0, 0, 0, 0, 0 };
        callbacks.need_data = &MediaPlayer::cb_source_need_data;
        callbacks.seek_data = &MediaPlayer::cb_source_seek_data;

        gst_app_src_set_callbacks( GST_APP_SRC(m_gst_source), &callbacks, this, 0 );

        // Our sources have bytes format
        g_object_set( m_gst_source, "format", GST_FORMAT_BYTES, NULL);

        m_gst_decoder = createElement ("decodebin", "decoder");
    }
    else
    {
        // For a regular file we do not need appsrc and decodebin
        m_gst_source = createElement("uridecodebin", "source");

        // We already set decoder so it is not created
        m_gst_decoder = m_gst_source;

        g_object_set( m_gst_source, "uri", m_mediaFile.toUtf8().constData(), NULL);
    }
}
Example #29
void VideoReceiver::consumeVideo(QByteArray *media)
{
  qDebug() << "In" << __FUNCTION__;

  GstBuffer *buffer = gst_buffer_new_and_alloc(media->length());

  // FIXME: zero copy?
  memcpy(GST_BUFFER_DATA(buffer), media->data(), media->length());

  if (gst_app_src_push_buffer(GST_APP_SRC(source), buffer) != GST_FLOW_OK) {
	qWarning("Error with gst_app_src_push_buffer");
  }
}
Example #30
void
shmdata_any_writer_push_data (shmdata_any_writer_t * context,
			      void *data,
			      int size,
			      unsigned long long timestamp,
			      void (*done_with_data) (void *),
			      void *user_data)
{
  GstBuffer *buf;
  buf = gst_app_buffer_new (data, size, done_with_data, user_data);
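  /* GST_BUFFER_TIMESTAMP below is the 0.10 accessor; GStreamer 1.0 splits
   * it into GST_BUFFER_PTS and GST_BUFFER_DTS */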
  GST_BUFFER_TIMESTAMP (buf) = (GstClockTime) (timestamp);
  gst_app_src_push_buffer (GST_APP_SRC (context->src_), buf);
}