/* Forward the requested queue limit to the underlying appsrc element.
 * The element is created lazily, so ensure construction happened first;
 * if no appsrc exists even after that, the call is a no-op. */
void ApplicationSource::setMaxBytes(quint64 max)
{
    d->lazyConstruct(this);
    if (!d->appSrc())
        return;
    gst_app_src_set_max_bytes(d->appSrc(), max);
}
/* Create the appsrc element that feeds buffered HTTP data into @pipeline.
 *
 * The element is created exactly once per object: if an element or pipeline
 * is already set, return the existing element when the same pipeline is
 * passed again, or 0 on a mismatch. On success the element is added to the
 * pipeline and returned (with an extra reference held by this object);
 * returns 0 if the "appsrc" factory is unavailable.
 */
GstElement *VideoHttpBuffer::setupSrcElement(GstElement *pipeline)
{
    Q_ASSERT(!m_element && !m_pipeline);
    if (m_element || m_pipeline) {
        /* Defensive fallback for release builds where Q_ASSERT is a no-op. */
        if (m_pipeline == pipeline)
            return GST_ELEMENT(m_element);
        return 0;
    }

    m_element = GST_APP_SRC(gst_element_factory_make("appsrc", "source"));
    if (!m_element)
        return 0;

    /* Keep our own reference; gst_bin_add() below takes over the element's
     * initial (floating) reference. */
    g_object_ref(m_element);
    m_pipeline = pipeline;

    /* max-bytes 0 = no internal queue limit; random-access mode lets the
     * pipeline request arbitrary offsets through the seek_data callback. */
    gst_app_src_set_max_bytes(m_element, 0);
    gst_app_src_set_stream_type(m_element, GST_APP_STREAM_TYPE_RANDOM_ACCESS);

    GstAppSrcCallbacks callbacks;
    memset(&callbacks, 0, sizeof(callbacks));
    callbacks.need_data = needDataWrap;
    /* seekDataWrap is declared with a quint64 offset; reinterpret_cast to the
     * guint64 signature GStreamer expects (same underlying 64-bit type). */
    callbacks.seek_data =
        reinterpret_cast<gboolean (*)(GstAppSrc*, guint64, void*)>(seekDataWrap);
    gst_app_src_set_callbacks(m_element, &callbacks, this, 0);

    if (media && media->fileSize())
        gst_app_src_set_size(m_element, media->fileSize());

    gst_bin_add(GST_BIN(m_pipeline), GST_ELEMENT(m_element));
    return GST_ELEMENT(m_element);
}
/* GObject property setter for the appsrc element (legacy variant storing
 * "format" and "block" directly on the instance).
 * Most properties delegate to the public gst_app_src_set_*() API so the same
 * code path runs whether the value arrives via g_object_set() or a direct
 * call; unknown property ids produce the standard GObject warning. */
static void
gst_app_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC (object);

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_src_set_caps (appsrc, gst_value_get_caps (value));
      break;
    case PROP_SIZE:
      gst_app_src_set_size (appsrc, g_value_get_int64 (value));
      break;
    case PROP_STREAM_TYPE:
      gst_app_src_set_stream_type (appsrc, g_value_get_enum (value));
      break;
    case PROP_MAX_BYTES:
      gst_app_src_set_max_bytes (appsrc, g_value_get_uint64 (value));
      break;
    case PROP_FORMAT:
      /* Stored directly; no dedicated setter in this variant. */
      appsrc->format = g_value_get_enum (value);
      break;
    case PROP_BLOCK:
      appsrc->block = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property setter for the appsrc element (private-struct variant).
 * Simple values ("format", "block", "min-percent") are written straight into
 * the private structure; everything else delegates to the matching public
 * gst_app_src_set_*() / gst_base_src_set_live() API so the same validation
 * and locking run regardless of how the property is set. */
static void
gst_app_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC_CAST (object);
  GstAppSrcPrivate *priv = appsrc->priv;

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_src_set_caps (appsrc, gst_value_get_caps (value));
      break;
    case PROP_SIZE:
      gst_app_src_set_size (appsrc, g_value_get_int64 (value));
      break;
    case PROP_STREAM_TYPE:
      gst_app_src_set_stream_type (appsrc, g_value_get_enum (value));
      break;
    case PROP_MAX_BYTES:
      gst_app_src_set_max_bytes (appsrc, g_value_get_uint64 (value));
      break;
    case PROP_FORMAT:
      priv->format = g_value_get_enum (value);
      break;
    case PROP_BLOCK:
      priv->block = g_value_get_boolean (value);
      break;
    case PROP_IS_LIVE:
      gst_app_src_set_live (GST_BASE_SRC (appsrc), g_value_get_boolean (value));
      break;
    case PROP_MIN_LATENCY:
      /* Update only the min bound; -1 leaves the other bound untouched. */
      gst_app_src_set_latencies (appsrc, TRUE, g_value_get_int64 (value),
          FALSE, -1);
      break;
    case PROP_MAX_LATENCY:
      /* Update only the max bound. */
      gst_app_src_set_latencies (appsrc, FALSE, -1, TRUE,
          g_value_get_int64 (value));
      break;
    case PROP_EMIT_SIGNALS:
      gst_app_src_set_emit_signals (appsrc, g_value_get_boolean (value));
      break;
    case PROP_MIN_PERCENT:
      priv->min_percent = g_value_get_uint (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* C920 Live Src Instance Initializer
 * ----------------------------------
 * Wire up the per-instance appsrc callback table and apply the default
 * element configuration: latency window 100..1000, unknown stream size,
 * stream (live) type, and a 2 MiB internal queue limit.
 */
static void c920_live_src_init(C920LiveSrc *self)
{
    GstAppSrc *appsrc = GST_APP_SRC(self);

    self->priv = C920_LIVE_SRC_GET_PRIVATE(self);

    /* Only fill/drain notifications are handled; a live source cannot seek,
     * so seek_data stays NULL. */
    self->priv->cb.need_data   = c920_live_src_need_data;
    self->priv->cb.enough_data = c920_live_src_enough_data;
    self->priv->cb.seek_data   = NULL;

    gst_app_src_set_latency(appsrc, 100, 1000);
    gst_app_src_set_size(appsrc, -1);
    gst_app_src_set_stream_type(appsrc, GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_max_bytes(appsrc, 2 * 1024 * 1024);
    /* Callbacks receive no user_data; they recover state from the element. */
    gst_app_src_set_callbacks(appsrc, &self->priv->cb, NULL, NULL);
}
// Creates and registers a new appsrc-backed Stream for @sourceBufferPrivate.
// Must be called before the first media data arrives: once all tracks are
// configured the topology is frozen and new source buffers are rejected.
// Returns MediaSourcePrivate::Ok on success, NotSupported otherwise.
MediaSourcePrivate::AddStatus PlaybackPipeline::addSourceBuffer(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate)
{
    WebKitMediaSrcPrivate* priv = m_webKitMediaSrc->priv;

    if (priv->allTracksConfigured) {
        GST_ERROR_OBJECT(m_webKitMediaSrc.get(), "Adding new source buffers after first data not supported yet");
        return MediaSourcePrivate::NotSupported;
    }

    GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "State %d", int(GST_STATE(m_webKitMediaSrc.get())));

    // Raw pointer: ownership is handed to priv->streams below.
    Stream* stream = new Stream{ };
    stream->parent = m_webKitMediaSrc.get();
    stream->appsrc = gst_element_factory_make("appsrc", nullptr);
    stream->appsrcNeedDataFlag = false;
    stream->sourceBuffer = sourceBufferPrivate.get();

    // No track has been attached yet.
    stream->type = Invalid;

    stream->parser = nullptr;
    stream->caps = nullptr;
    stream->audioTrack = nullptr;
    stream->videoTrack = nullptr;
    stream->presentationSize = WebCore::FloatSize();
    stream->lastEnqueuedTime = MediaTime::invalidTime();

    // Callbacks get the WebKitMediaSrc (stream->parent) as user data; signal
    // emission is disabled since the callback table is used instead.
    gst_app_src_set_callbacks(GST_APP_SRC(stream->appsrc), &enabledAppsrcCallbacks, stream->parent, nullptr);
    gst_app_src_set_emit_signals(GST_APP_SRC(stream->appsrc), FALSE);
    gst_app_src_set_stream_type(GST_APP_SRC(stream->appsrc), GST_APP_STREAM_TYPE_SEEKABLE);

    // Non-blocking appsrc with a 2 MB queue cap; min-percent 20 asks appsrc
    // to request more data when its queue drains (per appsrc docs).
    gst_app_src_set_max_bytes(GST_APP_SRC(stream->appsrc), 2 * WTF::MB);
    g_object_set(G_OBJECT(stream->appsrc), "block", FALSE, "min-percent", 20, nullptr);

    // streams is read from other threads; guard the mutation with the
    // element's object lock.
    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    priv->streams.prepend(stream);
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    gst_bin_add(GST_BIN(m_webKitMediaSrc.get()), stream->appsrc);
    gst_element_sync_state_with_parent(stream->appsrc);
    return MediaSourcePrivate::Ok;
}
/* Runs the GStreamer-based encoding loop for one recording.
 *
 * Reads the serialized header from @input to learn the frame size, builds the
 * class-provided pipeline description (the audio variant when @record_audio
 * is set), wires the "src" appsrc with fixed raw-video caps matching Cairo's
 * CAIRO_FORMAT_RGB24 byte layout, points the "sink" element at @output, then
 * plays the pipeline and blocks until an EOS or ERROR message arrives on the
 * bus. Returns TRUE on success; on failure sets @error and returns FALSE.
 */
static gboolean
byzanz_encoder_gstreamer_run (ByzanzEncoder * encoder,
    GInputStream * input, GOutputStream * output, gboolean record_audio,
    GCancellable * cancellable, GError ** error)
{
  ByzanzEncoderGStreamer *gstreamer = BYZANZ_ENCODER_GSTREAMER (encoder);
  ByzanzEncoderGStreamerClass *klass = BYZANZ_ENCODER_GSTREAMER_GET_CLASS (encoder);
  GstElement *sink;
  guint width, height;
  GstMessage *message;
  GstBus *bus;

  if (!byzanz_deserialize_header (input, &width, &height, cancellable, error))
    return FALSE;

  /* Scratch surface sized to the recording; frames are painted into it by
   * the need-data callbacks. */
  gstreamer->surface = cairo_image_surface_create (CAIRO_FORMAT_RGB24, width, height);

  g_assert (klass->pipeline_string);
  if (record_audio) {
    /* Audio requires a subclass-provided pipeline variant. */
    if (klass->audio_pipeline_string == NULL) {
      g_set_error_literal (error, G_IO_ERROR, G_IO_ERROR_FAILED,
          _("This format does not support recording audio."));
      return FALSE;
    }
    gstreamer->pipeline = gst_parse_launch (klass->audio_pipeline_string, error);
    gstreamer->audiosrc = gst_bin_get_by_name (GST_BIN (gstreamer->pipeline), "audiosrc");
    g_assert (gstreamer->audiosrc);
  } else {
    gstreamer->pipeline = gst_parse_launch (klass->pipeline_string, error);
  }
  if (gstreamer->pipeline == NULL)
    return FALSE;

  g_assert (GST_IS_PIPELINE (gstreamer->pipeline));
  gstreamer->src = GST_APP_SRC (gst_bin_get_by_name (GST_BIN (gstreamer->pipeline), "src"));
  g_assert (GST_IS_APP_SRC (gstreamer->src));
  sink = gst_bin_get_by_name (GST_BIN (gstreamer->pipeline), "sink");
  g_assert (sink);
  g_object_set (sink, "stream", output, NULL);
  g_object_unref (sink);

  /* Pixel format must match Cairo's in-memory layout, which depends on host
   * endianness. */
  gstreamer->caps = gst_caps_new_simple ("video/x-raw",
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
      "format", G_TYPE_STRING, "BGRx",
#elif G_BYTE_ORDER == G_BIG_ENDIAN
      "format", G_TYPE_STRING, "xRGB",
#else
#error "Please add the Cairo caps format here"
#endif
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, 0, 1,
      NULL);
  g_assert (gst_caps_is_fixed (gstreamer->caps));
  gst_app_src_set_caps (gstreamer->src, gstreamer->caps);
  gst_app_src_set_callbacks (gstreamer->src, &callbacks, gstreamer, NULL);
  gst_app_src_set_stream_type (gstreamer->src, GST_APP_STREAM_TYPE_STREAM);
  /* 0 = no limit on the appsrc internal queue. */
  gst_app_src_set_max_bytes (gstreamer->src, 0);
  g_object_set (gstreamer->src, "format", GST_FORMAT_TIME, NULL);

  if (!gst_element_set_state (gstreamer->pipeline, GST_STATE_PLAYING)) {
    g_set_error_literal (error, G_IO_ERROR, G_IO_ERROR_FAILED,
        _("Failed to start GStreamer pipeline"));
    return FALSE;
  }

  /* Block until the pipeline finishes or fails. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (gstreamer->pipeline));
  message = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  g_object_unref (bus);
  gst_element_set_state (gstreamer->pipeline, GST_STATE_NULL);

  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
    gst_message_parse_error (message, error, NULL);
    gst_message_unref (message);
    return FALSE;
  }
  gst_message_unref (message);
  return TRUE;
}
/* Builds the decoding pipeline for @mdecoder (video or audio), resolves the
 * named elements (appsrc, queue, sink and — for audio — volume), configures
 * the appsrc for time-based seekable push mode, applies sink tuning when the
 * sink is a concrete (non-auto) sink, then moves the pipeline to PLAYING.
 * Returns TRUE on success, FALSE on any construction or lookup failure.
 */
BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder)
{
#if GST_VERSION_MAJOR > 0
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin name=videodecoder !";
	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#else
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin2 name=videodecoder !";
	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin2 name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#endif
	char pipeline[1024];

	if (!mdecoder)
		return FALSE;

	/* TODO: Construction of the pipeline from a string allows easy overwrite with arguments.
	 * The only fixed elements necessary are appsrc and the volume element for audio streams.
	 * The rest could easily be provided in gstreamer pipeline notation from command line. */
	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=videosink", video, tsmf_platform_get_video_sink());
	else
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=audiosink", audio, tsmf_platform_get_audio_sink());

	DEBUG_TSMF("pipeline=%s", pipeline);
	mdecoder->pipe = gst_parse_launch(pipeline, NULL);

	if (!mdecoder->pipe)
	{
		WLog_ERR(TAG, "Failed to create new pipe");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosource");
	else
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosource");

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "Failed to get appsrc");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videoqueue");
	else
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audioqueue");

	if (!mdecoder->queue)
	{
		WLog_ERR(TAG, "Failed to get queue");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosink");
	else
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosink");

	if (!mdecoder->outsink)
	{
		WLog_ERR(TAG, "Failed to get sink");
		return FALSE;
	}

	g_signal_connect(mdecoder->outsink, "child-added", G_CALLBACK(cb_child_added), mdecoder);

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_AUDIO)
	{
		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");

		if (!mdecoder->volume)
		{
			WLog_ERR(TAG, "Failed to get volume");
			return FALSE;
		}

		tsmf_gstreamer_change_volume((ITSMFDecoder*)mdecoder, mdecoder->gstVolume*((double) 10000), mdecoder->gstMuted);
	}

	tsmf_platform_register_handler(mdecoder);
	/* AppSrc settings */
	GstAppSrcCallbacks callbacks =
	{
		tsmf_gstreamer_need_data,
		tsmf_gstreamer_enough_data,
		tsmf_gstreamer_seek_data
	};
	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);
	g_object_set(mdecoder->src, "is-live", FALSE, NULL);
	g_object_set(mdecoder->src, "block", FALSE, NULL);
	g_object_set(mdecoder->src, "blocksize", 1024, NULL);
	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);
	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);
	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);
	gst_app_src_set_latency((GstAppSrc *) mdecoder->src, 0, -1);
	gst_app_src_set_max_bytes((GstAppSrc *) mdecoder->src, (guint64) 0); /* unlimited */
	g_object_set(G_OBJECT(mdecoder->queue), "use-buffering", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "use-rate-estimate", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-buffers", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-bytes", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-time", (guint64) 0, NULL);

	/* Only set these properties if not an autosink, otherwise we will set
	 * properties when real sinks are added.
	 * BUGFIX: the original tested !g_strcmp0(...) && !g_strcmp0(...), which
	 * is true only when the type name equals BOTH auto-sink names at once —
	 * impossible, so this tuning block never ran. g_strcmp0() returns 0 on
	 * equality, so non-zero for both comparisons means "neither auto sink". */
	if (g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoVideoSink") &&
	    g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoAudioSink"))
	{
		if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
		}
		else
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "buffer-time", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "drift-tolerance", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "latency-time", (gint64) 10000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "slave-method", 1, NULL);
		}

		g_object_set(G_OBJECT(mdecoder->outsink), "sync", TRUE, NULL); /* synchronize on the clock */
		g_object_set(G_OBJECT(mdecoder->outsink), "async", TRUE, NULL); /* allow asynchronous state changes */
	}

	tsmf_window_create(mdecoder);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_READY);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	mdecoder->pipeline_start_time_valid = 0;
	mdecoder->shutdown = 0;
	mdecoder->paused = FALSE;
	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, get_type(mdecoder));
	return TRUE;
}