// Re-associates an existing Stream (looked up by its SourceBufferPrivate) with a new
// track object, and notifies listeners that the corresponding track set changed.
// The appsrc caps themselves are NOT touched here (see FIXME below).
void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate)
{
    GST_DEBUG("Re-attaching track");

    // FIXME: Maybe remove this method. Now the caps change is managed by gst_appsrc_push_sample() in enqueueSample()
    // and flushAndEnqueueNonDisplayingSamples().

    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();

    // The stream list is guarded by the element's object lock.
    GST_OBJECT_LOCK(webKitMediaSrc);
    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    ASSERT(stream && stream->type != Invalid);

    // The caps change is managed by gst_appsrc_push_sample() in enqueueSample() and
    // flushAndEnqueueNonDisplayingSamples(), so the caps aren't set from here.
    GRefPtr<GstCaps> appsrcCaps = adoptGRef(gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)));
    // The media type of the first caps structure decides which track slot to update.
    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrcCaps.get(), 0));

    int signal = -1;

    // Hold the lock while mutating the stream's track pointers.
    GST_OBJECT_LOCK(webKitMediaSrc);
    if (g_str_has_prefix(mediaType, "audio")) {
        ASSERT(stream->type == Audio);
        signal = SIGNAL_AUDIO_CHANGED;
        // Downcast is assumed safe because the stream was created for an audio track.
        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "video")) {
        ASSERT(stream->type == Video);
        signal = SIGNAL_VIDEO_CHANGED;
        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "text")) {
        ASSERT(stream->type == Text);
        signal = SIGNAL_TEXT_CHANGED;

        // FIXME: Support text tracks.
    }
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    // Emit outside the lock: GObject signal handlers may call back into this element.
    if (signal != -1)
        g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr);
}
void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps) { WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); Stream* stream = 0; //GstCaps* appsrccaps = 0; GstStructure* s = 0; const gchar* appsrctypename = 0; const gchar* mediaType = 0; gchar *parserBinName; bool capsNotifyHandlerConnected = false; unsigned padId = 0; GST_OBJECT_LOCK(webKitMediaSrc); stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream != 0); //gst_app_src_set_caps(GST_APP_SRC(stream->appsrc), caps); //appsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc)); s = gst_caps_get_structure(caps, 0); appsrctypename = gst_structure_get_name(s); mediaType = appsrctypename; GST_OBJECT_LOCK(webKitMediaSrc); padId = stream->parent->priv->numberOfPads; stream->parent->priv->numberOfPads++; GST_OBJECT_UNLOCK(webKitMediaSrc); parserBinName = g_strdup_printf("streamparser%u", padId); ASSERT(caps != 0); stream->parser = gst_bin_new(parserBinName); g_free(parserBinName); GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s, caps=%" GST_PTR_FORMAT, trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType, caps); if (!g_strcmp0(mediaType, "video/x-h264")) { GstElement* parser; GstElement* capsfilter; GstPad* pad = nullptr; GstCaps* filtercaps; filtercaps = gst_caps_new_simple("video/x-h264", "alignment", G_TYPE_STRING, "au", NULL); parser = gst_element_factory_make("h264parse", 0); capsfilter = gst_element_factory_make("capsfilter", 0); g_object_set(capsfilter, "caps", filtercaps, NULL); gst_caps_unref(filtercaps); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL); gst_element_link_pads(parser, "src", capsfilter, "sink"); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", 
pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(capsfilter, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else if (!g_strcmp0(mediaType, "video/x-h265")) { GstElement* parser; GstElement* capsfilter; GstPad* pad = nullptr; GstCaps* filtercaps; filtercaps = gst_caps_new_simple("video/x-h265", "alignment", G_TYPE_STRING, "au", NULL); parser = gst_element_factory_make("h265parse", 0); capsfilter = gst_element_factory_make("capsfilter", 0); g_object_set(capsfilter, "caps", filtercaps, NULL); gst_caps_unref(filtercaps); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, NULL); gst_element_link_pads(parser, "src", capsfilter, "sink"); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(capsfilter, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else if (!g_strcmp0(mediaType, "audio/mpeg")) { gint mpegversion = -1; GstElement* parser; GstPad* pad = nullptr; gst_structure_get_int(s, "mpegversion", &mpegversion); if (mpegversion == 1) { parser = gst_element_factory_make("mpegaudioparse", 0); } else if (mpegversion == 2 || mpegversion == 4) { parser = gst_element_factory_make("aacparse", 0); } else { ASSERT_NOT_REACHED(); } gst_bin_add(GST_BIN(stream->parser), parser); if (!pad) pad = gst_element_get_static_pad(parser, "sink"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad)); gst_object_unref(pad); pad = gst_element_get_static_pad(parser, "src"); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad)); gst_object_unref(pad); } else { GST_ERROR_OBJECT(stream->parent, "Unsupported caps: %" GST_PTR_FORMAT, caps); gst_object_unref(GST_OBJECT(stream->parser)); return; } //gst_caps_unref(appsrccaps); GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Unknown; 
GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream->parser); gst_bin_add(GST_BIN(stream->parent), stream->parser); gst_element_sync_state_with_parent(stream->parser); GstPad* sinkpad = gst_element_get_static_pad(stream->parser, "sink"); GstPad* srcpad = gst_element_get_static_pad(stream->appsrc, "src"); gst_pad_link(srcpad, sinkpad); gst_object_unref(srcpad); srcpad = 0; gst_object_unref(sinkpad); sinkpad = 0; srcpad = gst_element_get_static_pad(stream->parser, "src"); // TODO: Is padId the best way to identify the Stream? What about trackId? g_object_set_data(G_OBJECT(srcpad), "id", GINT_TO_POINTER(padId)); if (!capsNotifyHandlerConnected) g_signal_connect(srcpad, "notify::caps", G_CALLBACK(webKitMediaSrcParserNotifyCaps), stream); webKitMediaSrcLinkStreamToSrcPad(srcpad, stream); ASSERT(stream->parent->priv->mediaPlayerPrivate); int signal = -1; if (g_str_has_prefix(mediaType, "audio")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Audio; stream->parent->priv->nAudio++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_AUDIO_CHANGED; stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "video")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Video; stream->parent->priv->nVideo++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_VIDEO_CHANGED; stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "text")) { GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Text; stream->parent->priv->nText++; GST_OBJECT_UNLOCK(webKitMediaSrc); signal = SIGNAL_TEXT_CHANGED; // TODO: Support text tracks. } if (signal != -1) g_signal_emit(G_OBJECT(stream->parent), webkit_media_src_signals[signal], 0, NULL); gst_object_unref(srcpad); srcpad = 0; }
void PlaybackPipeline::attachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstStructure* structure, GstCaps* caps) { WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get(); GST_OBJECT_LOCK(webKitMediaSrc); Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get()); GST_OBJECT_UNLOCK(webKitMediaSrc); ASSERT(stream); GST_OBJECT_LOCK(webKitMediaSrc); unsigned padId = stream->parent->priv->numberOfPads; stream->parent->priv->numberOfPads++; GST_OBJECT_UNLOCK(webKitMediaSrc); const gchar* mediaType = gst_structure_get_name(structure); GST_DEBUG_OBJECT(webKitMediaSrc, "Configured track %s: appsrc=%s, padId=%u, mediaType=%s", trackPrivate->id().string().utf8().data(), GST_ELEMENT_NAME(stream->appsrc), padId, mediaType); GUniquePtr<gchar> parserBinName(g_strdup_printf("streamparser%u", padId)); if (!g_strcmp0(mediaType, "video/x-h264")) { GRefPtr<GstCaps> filterCaps = adoptGRef(gst_caps_new_simple("video/x-h264", "alignment", G_TYPE_STRING, "au", nullptr)); GstElement* capsfilter = gst_element_factory_make("capsfilter", nullptr); g_object_set(capsfilter, "caps", filterCaps.get(), nullptr); stream->parser = gst_bin_new(parserBinName.get()); GstElement* parser = gst_element_factory_make("h264parse", nullptr); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, nullptr); gst_element_link_pads(parser, "src", capsfilter, "sink"); GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get())); pad = adoptGRef(gst_element_get_static_pad(capsfilter, "src")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get())); } else if (!g_strcmp0(mediaType, "video/x-h265")) { GRefPtr<GstCaps> filterCaps = adoptGRef(gst_caps_new_simple("video/x-h265", "alignment", G_TYPE_STRING, "au", nullptr)); GstElement* capsfilter = gst_element_factory_make("capsfilter", nullptr); 
g_object_set(capsfilter, "caps", filterCaps.get(), nullptr); stream->parser = gst_bin_new(parserBinName.get()); GstElement* parser = gst_element_factory_make("h265parse", nullptr); gst_bin_add_many(GST_BIN(stream->parser), parser, capsfilter, nullptr); gst_element_link_pads(parser, "src", capsfilter, "sink"); GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get())); pad = adoptGRef(gst_element_get_static_pad(capsfilter, "src")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get())); } else if (!g_strcmp0(mediaType, "audio/mpeg")) { gint mpegversion = -1; gst_structure_get_int(structure, "mpegversion", &mpegversion); GstElement* parser = nullptr; if (mpegversion == 1) parser = gst_element_factory_make("mpegaudioparse", nullptr); else if (mpegversion == 2 || mpegversion == 4) parser = gst_element_factory_make("aacparse", nullptr); else ASSERT_NOT_REACHED(); stream->parser = gst_bin_new(parserBinName.get()); gst_bin_add(GST_BIN(stream->parser), parser); GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(parser, "sink")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("sink", pad.get())); pad = adoptGRef(gst_element_get_static_pad(parser, "src")); gst_element_add_pad(stream->parser, gst_ghost_pad_new("src", pad.get())); } else if (!g_strcmp0(mediaType, "video/x-vp9")) stream->parser = nullptr; else { GST_ERROR_OBJECT(stream->parent, "Unsupported media format: %s", mediaType); return; } GST_OBJECT_LOCK(webKitMediaSrc); stream->type = Unknown; GST_OBJECT_UNLOCK(webKitMediaSrc); GRefPtr<GstPad> sourcePad; if (stream->parser) { gst_bin_add(GST_BIN(stream->parent), stream->parser); gst_element_sync_state_with_parent(stream->parser); GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(stream->parser, "sink")); sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src")); gst_pad_link(sourcePad.get(), sinkPad.get()); sourcePad 
= adoptGRef(gst_element_get_static_pad(stream->parser, "src")); } else { GST_DEBUG_OBJECT(m_webKitMediaSrc.get(), "Stream of type %s doesn't require a parser bin", mediaType); sourcePad = adoptGRef(gst_element_get_static_pad(stream->appsrc, "src")); } ASSERT(sourcePad); // FIXME: Is padId the best way to identify the Stream? What about trackId? g_object_set_data(G_OBJECT(sourcePad.get()), "padId", GINT_TO_POINTER(padId)); webKitMediaSrcLinkParser(sourcePad.get(), caps, stream); ASSERT(stream->parent->priv->mediaPlayerPrivate); int signal = -1; GST_OBJECT_LOCK(webKitMediaSrc); if (g_str_has_prefix(mediaType, "audio")) { stream->type = Audio; stream->parent->priv->numberOfAudioStreams++; signal = SIGNAL_AUDIO_CHANGED; stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "video")) { stream->type = Video; stream->parent->priv->numberOfVideoStreams++; signal = SIGNAL_VIDEO_CHANGED; stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get())); } else if (g_str_has_prefix(mediaType, "text")) { stream->type = Text; stream->parent->priv->numberOfTextStreams++; signal = SIGNAL_TEXT_CHANGED; // FIXME: Support text tracks. } GST_OBJECT_UNLOCK(webKitMediaSrc); if (signal != -1) g_signal_emit(G_OBJECT(stream->parent), webKitMediaSrcSignals[signal], 0, nullptr); }
// Re-associates an existing Stream (looked up by its SourceBufferPrivate) with a new
// track object and notifies listeners that the corresponding track set changed.
// The `caps` parameter is intentionally unused: caps propagation now happens through
// gst_appsrc_push_sample() (see TODO below).
void PlaybackPipeline::reattachTrack(RefPtr<SourceBufferPrivateGStreamer> sourceBufferPrivate, RefPtr<TrackPrivateBase> trackPrivate, GstCaps* caps)
{
    LOG_MEDIA_MESSAGE("Re-attaching track");
    UNUSED_PARAM(caps);

    // TODO: Maybe remove this method.
    // Now the caps change is managed by gst_appsrc_push_sample()
    // in enqueueSample() and flushAndEnqueueNonDisplayingSamples().

    WebKitMediaSrc* webKitMediaSrc = m_webKitMediaSrc.get();

    // The stream list is guarded by the element's object lock.
    GST_OBJECT_LOCK(webKitMediaSrc);
    Stream* stream = getStreamBySourceBufferPrivate(webKitMediaSrc, sourceBufferPrivate.get());
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    ASSERT(stream != 0);
    ASSERT(stream->type != Invalid);

    // Transfer-full: both caps refs are released at the end of this method.
    GstCaps* oldAppsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc));
    // Now the caps change is managed by gst_appsrc_push_sample()
    // in enqueueSample() and flushAndEnqueueNonDisplayingSamples().
    // gst_app_src_set_caps(GST_APP_SRC(stream->appsrc), caps);
    GstCaps* appsrccaps = gst_app_src_get_caps(GST_APP_SRC(stream->appsrc));
    const gchar* mediaType = gst_structure_get_name(gst_caps_get_structure(appsrccaps, 0));

    // NOTE(review): with the set_caps call above commented out, old and new caps are
    // fetched back-to-back from the same appsrc, so this branch appears to be dead.
    // It is kept as a diagnostic for when caps updates are re-enabled — confirm before
    // removing.
    if (!gst_caps_is_equal(oldAppsrccaps, appsrccaps)) {
        LOG_MEDIA_MESSAGE("Caps have changed, but reconstructing the sequence of elements is not supported yet");
        gchar* stroldcaps = gst_caps_to_string(oldAppsrccaps);
        gchar* strnewcaps = gst_caps_to_string(appsrccaps);
        LOG_MEDIA_MESSAGE("oldcaps: %s", stroldcaps);
        LOG_MEDIA_MESSAGE("newcaps: %s", strnewcaps);
        g_free(stroldcaps);
        g_free(strnewcaps);
    }

    int signal = -1;

    // Hold the lock while mutating the stream's track pointers.
    GST_OBJECT_LOCK(webKitMediaSrc);
    if (g_str_has_prefix(mediaType, "audio")) {
        ASSERT(stream->type == Audio);
        signal = SIGNAL_AUDIO_CHANGED;
        // Downcast is assumed safe because the stream was created for an audio track.
        stream->audioTrack = RefPtr<WebCore::AudioTrackPrivateGStreamer>(static_cast<WebCore::AudioTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "video")) {
        ASSERT(stream->type == Video);
        signal = SIGNAL_VIDEO_CHANGED;
        stream->videoTrack = RefPtr<WebCore::VideoTrackPrivateGStreamer>(static_cast<WebCore::VideoTrackPrivateGStreamer*>(trackPrivate.get()));
    } else if (g_str_has_prefix(mediaType, "text")) {
        ASSERT(stream->type == Text);
        signal = SIGNAL_TEXT_CHANGED;

        // TODO: Support text tracks and mediaTypes related to EME
    }
    GST_OBJECT_UNLOCK(webKitMediaSrc);

    gst_caps_unref(appsrccaps);
    gst_caps_unref(oldAppsrccaps);

    // Emit outside the lock: GObject signal handlers may call back into this element.
    if (signal != -1)
        g_signal_emit(G_OBJECT(stream->parent), webkit_media_src_signals[signal], 0, NULL);
}