/* GObject property getter for GstAppSrc.
 *
 * object:  the GstAppSrc instance (as a GObject)
 * prop_id: which property is being read
 * value:   initialized GValue to store the result in
 * pspec:   the property's GParamSpec (used only for the invalid-id warning)
 */
static void
gst_app_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAppSrc *appsrc = GST_APP_SRC (object);

  switch (prop_id) {
    case PROP_CAPS:
    {
      GstCaps *caps;

      /* we're missing a _take_caps() function to transfer ownership:
       * get_caps() returns a new ref, set_caps() adds another, so we
       * drop ours afterwards */
      caps = gst_app_src_get_caps (appsrc);
      gst_value_set_caps (value, caps);
      if (caps)
        gst_caps_unref (caps);
      break;
    }
    case PROP_SIZE:
      g_value_set_int64 (value, gst_app_src_get_size (appsrc));
      break;
    case PROP_STREAM_TYPE:
      g_value_set_enum (value, gst_app_src_get_stream_type (appsrc));
      break;
    case PROP_MAX_BYTES:
      g_value_set_uint64 (value, gst_app_src_get_max_bytes (appsrc));
      break;
    case PROP_FORMAT:
      g_value_set_enum (value, appsrc->priv->format);
      break;
    case PROP_BLOCK:
      g_value_set_boolean (value, appsrc->priv->block);
      break;
    case PROP_IS_LIVE:
      g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (appsrc)));
      break;
    case PROP_MIN_LATENCY:
    {
      guint64 min;

      gst_app_src_get_latency (appsrc, &min, NULL);
      g_value_set_int64 (value, min);
      break;
    }
    case PROP_MAX_LATENCY:
    {
      guint64 max;

      /* FIX: this previously passed &max as the *min* out-parameter
       * (gst_app_src_get_latency (appsrc, &max, NULL)), so reading
       * max-latency returned the minimum latency. Request the max
       * field instead. */
      gst_app_src_get_latency (appsrc, NULL, &max);
      g_value_set_int64 (value, max);
      break;
    }
    case PROP_EMIT_SIGNALS:
      g_value_set_boolean (value, gst_app_src_get_emit_signals (appsrc));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject property getter for GstAppSrc.
 *
 * Dispatches on prop_id and stores the current value of the requested
 * property into the caller-provided GValue. Unknown ids trigger the
 * standard invalid-property warning.
 */
static void
gst_app_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAppSrc *self = GST_APP_SRC_CAST (object);
  GstAppSrcPrivate *priv = self->priv;

  switch (prop_id) {
    case PROP_CAPS:
      /* get_caps() returns a new reference; hand ownership straight to
       * the GValue instead of set + unref */
      g_value_take_boxed (value, gst_app_src_get_caps (self));
      break;
    case PROP_SIZE:
      g_value_set_int64 (value, gst_app_src_get_size (self));
      break;
    case PROP_STREAM_TYPE:
      g_value_set_enum (value, gst_app_src_get_stream_type (self));
      break;
    case PROP_MAX_BYTES:
      g_value_set_uint64 (value, gst_app_src_get_max_bytes (self));
      break;
    case PROP_FORMAT:
      g_value_set_enum (value, priv->format);
      break;
    case PROP_BLOCK:
      g_value_set_boolean (value, priv->block);
      break;
    case PROP_IS_LIVE:
      g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (self)));
      break;
    case PROP_MIN_LATENCY:
    {
      guint64 min_latency;

      /* only the min field is wanted here */
      gst_app_src_get_latency (self, &min_latency, NULL);
      g_value_set_int64 (value, min_latency);
      break;
    }
    case PROP_MAX_LATENCY:
    {
      guint64 max_latency;

      /* only the max field is wanted here */
      gst_app_src_get_latency (self, NULL, &max_latency);
      g_value_set_int64 (value, max_latency);
      break;
    }
    case PROP_EMIT_SIGNALS:
      g_value_set_boolean (value, gst_app_src_get_emit_signals (self));
      break;
    case PROP_MIN_PERCENT:
      g_value_set_uint (value, priv->min_percent);
      break;
    case PROP_CURRENT_LEVEL_BYTES:
      g_value_set_uint64 (value, gst_app_src_get_current_level_bytes (self));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/*! Returns the caps currently configured on the wrapped appsrc element,
    or a null CapsPtr when no appsrc element is available yet.
    NOTE(review): wrap(..., false) presumably adopts the reference that
    gst_app_src_get_caps() returns rather than adding a new one — confirm
    against CapsPtr::wrap's increaseRef semantics. */
CapsPtr ApplicationSource::caps() const
{
    GstAppSrc* src = d->appSrc();
    if (!src)
        return CapsPtr();
    return CapsPtr::wrap(gst_app_src_get_caps(src), false);
}
// Re-binds the WebCore track object (audio/video/text) to the stream backing the
// given SourceBuffer and emits the matching *-changed signal on the source element.
// FIXME: Maybe remove this method. Now the caps change is managed by gst_appsrc_push_sample()
// in enqueueSample() and flushAndEnqueueNonDisplayingSamples().
void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate)
{
    GST_DEBUG("Re-attaching track");

    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();

    // Stream lookup happens under the element's object lock; the pointer is used
    // after unlocking, so this relies on the stream outliving the call.
    GST_OBJECT_LOCK(webKitMediaSrc);
    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    ASSERT(stream && stream->type != Invalid);

    // The caps change is managed by gst_appsrc_push_sample() in enqueueSample() and
    // flushAndEnqueueNonDisplayingSamples(), so the caps aren't set from here. The
    // appsrc caps are only inspected to classify the stream's media type.
    GRefPtr<GstCaps> appsrcCaps = adoptGRef(gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)));
    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrcCaps.get(), 0));

    // -1 means "no matching media type"; in that case no signal is emitted.
    int signal = -1;

    GST_OBJECT_LOCK(webKitMediaSrc);
    if (g_str_has_prefix(mediaType, "audio")) {
        ASSERT(stream->type == Audio);
        signal = SIGNAL_AUDIO_CHANGED;
        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "video")) {
        ASSERT(stream->type == Video);
        signal = SIGNAL_VIDEO_CHANGED;
        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "text")) {
        ASSERT(stream->type == Text);
        signal = SIGNAL_TEXT_CHANGED;
        // FIXME: Support text tracks.
    }
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    if (signal != -1)
        g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
}
/* GObject property getter for GstAppSrc.
 *
 * Reads the requested property (identified by prop_id) into the supplied
 * GValue; unrecognized ids produce the standard invalid-property warning.
 */
static void
gst_app_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstAppSrc *self = GST_APP_SRC (object);

  switch (prop_id) {
    case PROP_CAPS:
    {
      /* we're missing a _take_caps() function to transfer ownership, so
       * set a copy into the GValue and drop the ref get_caps() gave us */
      GstCaps *current_caps = gst_app_src_get_caps (self);

      gst_value_set_caps (value, current_caps);
      if (current_caps != NULL)
        gst_caps_unref (current_caps);
      break;
    }
    case PROP_SIZE:
      g_value_set_int64 (value, gst_app_src_get_size (self));
      break;
    case PROP_STREAM_TYPE:
      g_value_set_enum (value, gst_app_src_get_stream_type (self));
      break;
    case PROP_MAX_BYTES:
      g_value_set_uint64 (value, gst_app_src_get_max_bytes (self));
      break;
    case PROP_FORMAT:
      g_value_set_enum (value, self->format);
      break;
    case PROP_BLOCK:
      g_value_set_boolean (value, self->block);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps) { LOG_MEDIA_MESSAGE("Re-attaching track"); UNUSED_PARAM(caps); // TODO: Maybe remove this method. // Now the caps change is managed by gst_appsrc_push_sample() // in enqueueSample() and flushAndEnqueueNonDisplayingSamples(). WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); GST_OBJECT_LOCK(webKitMediaSrc); Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream != 0); ASSERT(stream->type != Invalid); GstCaps* oldAppsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)); // Now the caps change is managed by gst_appsrc_push_sample() // in enqueueSample() and flushAndEnqueueNonDisplayingSamples(). // gst_app_src_set_caps(GST_APP_SRC(stream->appsrc), caps); GstCaps* appsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)); const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrccaps, 0)); if (!gst_caps_is_equal(oldAppsrccaps, appsrccaps)) { LOG_MEDIA_MESSAGE("Caps have changed, but reconstructing the sequence of elements is not supported yet"); gchar* stroldcaps = gst_caps_to_string(oldAppsrccaps); gchar* strnewcaps = gst_caps_to_string(appsrccaps); LOG_MEDIA_MESSAGE("oldcaps: %s", stroldcaps); LOG_MEDIA_MESSAGE("newcaps: %s", strnewcaps); g_free(stroldcaps); g_free(strnewcaps); } int signal = -1; GST_OBJECT_LOCK(webKitMediaSrc); if (g_str_has_prefix(mediaType, "audio")) { ASSERT(stream->type == Audio); signal = SIGNAL_AUDIO_CHANGED; stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "video")) { ASSERT(stream->type == Video); signal = SIGNAL_VIDEO_CHANGED; stream->videoTrack = 
RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "text")) { ASSERT(stream->type == Text); signal = SIGNAL_TEXT_CHANGED; // TODO: Support text tracks and mediaTypes related to EME } GST_OBJECT_UNLOCK(webKitMediaSrc); gst_caps_unref(appsrccaps); gst_caps_unref(oldAppsrccaps); if (signal != -1) g_signal_emit(G_OBJECT(stream->parent), webkit_media_src_signals[signal], 0, NULL); }