static void
kms_selectable_mixer_port_data_destroy (gpointer data)
{
  KmsSelectableMixerPortData *port_data = (KmsSelectableMixerPortData *) data;
  KmsSelectableMixer *self = port_data->mixer;

  KMS_SELECTABLE_MIXER_LOCK (self);

  release_sink_pads (port_data->audiomixer);

  gst_bin_remove_many (GST_BIN (self), port_data->audio_agnostic,
      port_data->video_agnostic, port_data->audiomixer, NULL);

  KMS_SELECTABLE_MIXER_UNLOCK (self);

  gst_element_set_state (port_data->audiomixer, GST_STATE_NULL);
  gst_element_set_state (port_data->audio_agnostic, GST_STATE_NULL);
  gst_element_set_state (port_data->video_agnostic, GST_STATE_NULL);

  g_clear_object (&port_data->audiomixer);
  g_clear_object (&port_data->video_agnostic);
  g_clear_object (&port_data->audio_agnostic);

  g_slice_free (KmsSelectableMixerPortData, data);
}
void GStreamerGWorld::removePlatformVideoSink()
{
    if (!m_dynamicPadName)
        return;

    // Get video sink bin and the elements to remove.
    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    GRefPtr<GstElement> videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));
    GRefPtr<GstElement> platformVideoSink = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink"));
    GRefPtr<GstElement> queue = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue"));
    GRefPtr<GstElement> colorspace = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace"));
    GRefPtr<GstElement> videoScale = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale"));

    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get()));
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
    gst_pad_unlink(srcPad.get(), sinkPad.get());
    gst_element_release_request_pad(tee.get(), srcPad.get());

    gst_element_unlink_many(queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
    gst_bin_remove_many(GST_BIN(videoSink.get()), queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
    gst_element_set_state(platformVideoSink.get(), GST_STATE_NULL);
    gst_element_set_state(videoScale.get(), GST_STATE_NULL);
    gst_element_set_state(colorspace.get(), GST_STATE_NULL);
    gst_element_set_state(queue.get(), GST_STATE_NULL);

    m_dynamicPadName.clear();
}
static gboolean
remove_elements_from_pipeline (KmsAlphaBlendingData * port_data)
{
  KmsAlphaBlending *self = port_data->mixer;
  GstElement *videoconvert, *videoscale, *videorate, *capsfilter, *queue,
      *videobox;

  KMS_ALPHA_BLENDING_LOCK (self);

  videobox = port_data->videobox;
  gst_element_unlink (videobox, self->priv->videomixer);

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  videoconvert = g_object_ref (port_data->videoconvert);
  videorate = g_object_ref (port_data->videorate);
  queue = g_object_ref (port_data->queue);
  videoscale = g_object_ref (port_data->videoscale);
  capsfilter = g_object_ref (port_data->capsfilter);
  g_object_ref (videobox);

  g_object_unref (port_data->videoconvert_sink_pad);
  port_data->videoconvert_sink_pad = NULL;
  port_data->videoconvert = NULL;
  port_data->videorate = NULL;
  port_data->queue = NULL;
  port_data->videoscale = NULL;
  port_data->capsfilter = NULL;
  port_data->videobox = NULL;

  gst_bin_remove_many (GST_BIN (self), videoconvert, videoscale, capsfilter,
      videorate, queue, videobox, NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_ALPHA_BLENDING_UNLOCK (self);

  gst_element_set_state (videoconvert, GST_STATE_NULL);
  gst_element_set_state (videoscale, GST_STATE_NULL);
  gst_element_set_state (videorate, GST_STATE_NULL);
  gst_element_set_state (capsfilter, GST_STATE_NULL);
  gst_element_set_state (queue, GST_STATE_NULL);
  gst_element_set_state (videobox, GST_STATE_NULL);

  g_object_unref (videoconvert);
  g_object_unref (videoscale);
  g_object_unref (videorate);
  g_object_unref (capsfilter);
  g_object_unref (queue);
  g_object_unref (videobox);

  return G_SOURCE_REMOVE;
}
static gboolean
remove_elements_from_pipeline (KmsCompositeMixerData * port_data)
{
  KmsCompositeMixer *self = port_data->mixer;

  KMS_COMPOSITE_MIXER_LOCK (self);

  gst_element_unlink (port_data->capsfilter, self->priv->videomixer);

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  g_object_unref (port_data->videoconvert_sink_pad);

  gst_bin_remove_many (GST_BIN (self),
      g_object_ref (port_data->input_capsfilter),
      g_object_ref (port_data->videoconvert),
      g_object_ref (port_data->videoscale),
      g_object_ref (port_data->capsfilter),
      g_object_ref (port_data->videorate),
      g_object_ref (port_data->queue), NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_COMPOSITE_MIXER_UNLOCK (self);

  gst_element_set_state (port_data->input_capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->videoconvert, GST_STATE_NULL);
  gst_element_set_state (port_data->videoscale, GST_STATE_NULL);
  gst_element_set_state (port_data->videorate, GST_STATE_NULL);
  gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->queue, GST_STATE_NULL);

  g_object_unref (port_data->input_capsfilter);
  g_object_unref (port_data->videoconvert);
  g_object_unref (port_data->videoscale);
  g_object_unref (port_data->videorate);
  g_object_unref (port_data->capsfilter);
  g_object_unref (port_data->queue);

  port_data->videoconvert_sink_pad = NULL;
  port_data->input_capsfilter = NULL;
  port_data->videoconvert = NULL;
  port_data->videoscale = NULL;
  port_data->capsfilter = NULL;
  port_data->videorate = NULL;
  port_data->queue = NULL;

  return G_SOURCE_REMOVE;
}
void GstVideoPlayerBackend::demuxerNoMorePads(GstElement *demux)
{
    Q_UNUSED(demux);
    qDebug("gstreamer: no more pads signal");

    /* There is no audio stream: unlink the audio elements from the pipeline.
     * Without this the pipeline hangs waiting for an audio stream to show up. */
    if (!m_hasAudio)
        gst_bin_remove_many(GST_BIN(m_pipeline), m_audioQueue, m_audioDecoder,
                            m_audioLink, m_audioResample, m_volumeController,
                            m_audioSink, NULL);

    emit streamsInitialized(m_hasAudio);
}
void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    // Get video sink bin and the elements to remove.
    GOwnPtr<GstElement> videoSink;
    g_object_get(m_pipeline, "video-sink", &videoSink.outPtr(), NULL);
    GstElement* tee = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee");
    GstElement* platformVideoSink = gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink");
    GstElement* queue = gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue");
    GstElement* colorspace = gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace");
    GstElement* videoScale = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale");

    // Get pads to unlink and remove.
    GstPad* srcPad = gst_element_get_static_pad(tee, m_dynamicPadName);
    GstPad* sinkPad = gst_element_get_static_pad(queue, "sink");

    // Block data flow towards the pipeline branch to remove. No need
    // for pad blocking if the pipeline is paused.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad, true)) {

        // Unlink and release request pad.
        gst_pad_unlink(srcPad, sinkPad);
        gst_element_release_request_pad(tee, srcPad);

        // Unlink, remove and cleanup queue, ffmpegcolorspace, videoScale and sink.
        gst_element_unlink_many(queue, colorspace, videoScale, platformVideoSink, NULL);
        gst_bin_remove_many(GST_BIN(videoSink.get()), queue, colorspace, videoScale, platformVideoSink, NULL);
        gst_element_set_state(platformVideoSink, GST_STATE_NULL);
        gst_element_set_state(videoScale, GST_STATE_NULL);
        gst_element_set_state(colorspace, GST_STATE_NULL);
        gst_element_set_state(queue, GST_STATE_NULL);
    }

    gst_object_unref(GST_OBJECT(srcPad));
    gst_object_unref(GST_OBJECT(sinkPad));

    gst_object_unref(queue);
    gst_object_unref(colorspace);
    gst_object_unref(videoScale);
    gst_object_unref(platformVideoSink);
    gst_object_unref(tee);

    m_dynamicPadName = 0;
}
void AudioSourceProviderGStreamer::handleRemovedDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads--;

    // Remove the queue ! appsink chain downstream of deinterleave.
    GQuark quark = g_quark_from_static_string("peer");
    GstPad* sinkPad = reinterpret_cast<GstPad*>(g_object_get_qdata(G_OBJECT(pad), quark));
    GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(sinkPad));
    GRefPtr<GstPad> queueSrcPad = adoptGRef(gst_element_get_static_pad(queue.get(), "src"));
    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_pad_get_peer(queueSrcPad.get()));
    GRefPtr<GstElement> sink = adoptGRef(gst_pad_get_parent_element(appsinkSinkPad.get()));
    gst_element_set_state(sink.get(), GST_STATE_NULL);
    gst_element_set_state(queue.get(), GST_STATE_NULL);
    gst_element_unlink(queue.get(), sink.get());
    gst_bin_remove_many(GST_BIN(m_audioSinkBin.get()), queue.get(), sink.get(), nullptr);
}
static gboolean
remove_elements_from_pipeline (KmsCompositeMixerData * port_data)
{
  KmsCompositeMixer *self = port_data->mixer;

  KMS_COMPOSITE_MIXER_LOCK (self);

  gst_element_unlink (port_data->capsfilter, self->priv->videomixer);

  if (port_data->latency_probe_id > 0) {
    gst_pad_remove_probe (port_data->video_mixer_pad,
        port_data->latency_probe_id);
    port_data->latency_probe_id = 0;
  }

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  gst_bin_remove_many (GST_BIN (self),
      g_object_ref (port_data->capsfilter),
      g_object_ref (port_data->tee),
      g_object_ref (port_data->fakesink), NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_COMPOSITE_MIXER_UNLOCK (self);

  gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->tee, GST_STATE_NULL);
  gst_element_set_state (port_data->fakesink, GST_STATE_NULL);

  g_object_unref (port_data->capsfilter);
  g_object_unref (port_data->tee);
  g_object_unref (port_data->fakesink);
  g_object_unref (port_data->tee_sink_pad);

  port_data->tee_sink_pad = NULL;
  port_data->capsfilter = NULL;
  port_data->tee = NULL;
  port_data->fakesink = NULL;

  return G_SOURCE_REMOVE;
}
static void
uridecodebin_pad_removed_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  GList *tmp;
  PrivateStream *ps;
  GstPad *sinkpad;

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  /* Find the PrivateStream */
  DISCO_LOCK (dc);
  for (tmp = dc->priv->streams; tmp; tmp = tmp->next) {
    ps = (PrivateStream *) tmp->data;
    if (ps->pad == pad)
      break;
  }

  if (tmp == NULL) {
    DISCO_UNLOCK (dc);
    GST_DEBUG ("The removed pad wasn't controlled by us !");
    return;
  }

  dc->priv->streams = g_list_delete_link (dc->priv->streams, tmp);
  DISCO_UNLOCK (dc);

  gst_element_set_state (ps->sink, GST_STATE_NULL);
  gst_element_set_state (ps->queue, GST_STATE_NULL);

  gst_element_unlink (ps->queue, ps->sink);

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  gst_pad_unlink (pad, sinkpad);
  gst_object_unref (sinkpad);

  /* references removed here */
  gst_bin_remove_many (dc->priv->pipeline, ps->sink, ps->queue, NULL);

  if (ps->tags) {
    gst_tag_list_free (ps->tags);
  }

  g_slice_free (PrivateStream, ps);

  GST_DEBUG ("Done handling pad");
}
static void
kms_dispatcher_port_data_destroy (gpointer data)
{
  KmsDispatcherPortData *port_data = (KmsDispatcherPortData *) data;
  KmsDispatcher *self = port_data->dispatcher;

  KMS_DISPATCHER_LOCK (self);

  gst_bin_remove_many (GST_BIN (self), port_data->audio_agnostic,
      port_data->video_agnostic, NULL);

  KMS_DISPATCHER_UNLOCK (self);

  gst_element_set_state (port_data->audio_agnostic, GST_STATE_NULL);
  gst_element_set_state (port_data->video_agnostic, GST_STATE_NULL);

  g_clear_object (&port_data->audio_agnostic);
  g_clear_object (&port_data->video_agnostic);

  g_slice_free (KmsDispatcherPortData, data);
}
void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    // Get video sink bin and the elements to remove.
    GRefPtr<GstElement> videoSink;
    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));
    GRefPtr<GstElement> platformVideoSink = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink"));
    GRefPtr<GstElement> queue = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue"));
    GRefPtr<GstElement> colorspace = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace"));
    GRefPtr<GstElement> videoScale = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale"));

    // Get pads to unlink and remove.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get()));
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));

    // Block data flow towards the pipeline branch to remove. No need
    // for pad blocking if the pipeline is paused.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad.get(), true)) {

        // Unlink and release request pad.
        gst_pad_unlink(srcPad.get(), sinkPad.get());
        gst_element_release_request_pad(tee.get(), srcPad.get());

        // Unlink, remove and cleanup queue, ffmpegcolorspace, videoScale and sink.
        gst_element_unlink_many(queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
        gst_bin_remove_many(GST_BIN(videoSink.get()), queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
        gst_element_set_state(platformVideoSink.get(), GST_STATE_NULL);
        gst_element_set_state(videoScale.get(), GST_STATE_NULL);
        gst_element_set_state(colorspace.get(), GST_STATE_NULL);
        gst_element_set_state(queue.get(), GST_STATE_NULL);
    }

    m_dynamicPadName.clear();
}
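/*
 * Note (not from the sources above): gst_pad_set_blocked() used in
 * exitFullscreen() is GStreamer 0.10 API. In GStreamer 1.x the same
 * "block, then surgically remove the branch" technique is written with a
 * pad probe; GST_PAD_PROBE_TYPE_IDLE guarantees the callback runs while
 * the pad is not streaming. A minimal sketch, with hypothetical tee/queue
 * branch elements:
 */
static GstPadProbeReturn
remove_branch_cb (GstPad * teepad, GstPadProbeInfo * info, gpointer user_data)
{
  GstElement *queue = GST_ELEMENT (user_data);  /* head of the branch */
  GstElement *parent = GST_ELEMENT (gst_element_get_parent (queue));
  GstElement *tee = gst_pad_get_parent_element (teepad);
  GstPad *sinkpad = gst_element_get_static_pad (queue, "sink");

  /* The pad is idle here, so unlinking cannot race with data flow. */
  gst_pad_unlink (teepad, sinkpad);
  gst_object_unref (sinkpad);

  /* Shut the branch down before the bin drops its reference. */
  gst_element_set_state (queue, GST_STATE_NULL);
  gst_bin_remove (GST_BIN (parent), queue);

  gst_element_release_request_pad (tee, teepad);
  gst_object_unref (tee);
  gst_object_unref (parent);

  return GST_PAD_PROBE_REMOVE;  /* one-shot probe */
}

static void
remove_branch (GstPad * teepad, GstElement * queue)
{
  gst_pad_add_probe (teepad, GST_PAD_PROBE_TYPE_IDLE,
      remove_branch_cb, queue, NULL);
}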
// ----------------------------------------------------------------------------
// Process the "pad-removed" signal from uridecodebin
void GStreamerImportFileHandle::OnPadRemoved(GstPad *pad)
{
   GStreamContext *c = GETCTX(pad);

   // Set audioconvert and appsink states to NULL
   gst_element_set_state(c->mSink, GST_STATE_NULL);
   gst_element_set_state(c->mConv, GST_STATE_NULL);

   // Unlink audioconvert -> appsink
   gst_element_unlink(c->mConv, c->mSink);

   // Remove the elements from the pipeline
   gst_bin_remove_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

   // And reset context
   c->mConv = NULL;
   c->mSink = NULL;

   return;
}
static void
pad_added_cb (GstElement * element, GstPad * pad, gpointer data)
{
  GstElement *pipeline = GST_ELEMENT (data);
  GstElement *wavenc, *sink;
  GstPadLinkReturn ret;
  GstPad *sinkpad;
  gchar *msg;

  if (gst_pad_get_direction (pad) != GST_PAD_SRC)
    return;

  wavenc = gst_element_factory_make ("wavenc", NULL);

#ifdef MANUAL_CHECK
  {
    gchar *filename;

    G_LOCK (mutex);
    filename = g_strdup_printf ("file_%u.wv", id++);
    GST_DEBUG ("Creating file %s", filename);
    G_UNLOCK (mutex);

    sink = gst_element_factory_make ("filesink", NULL);
    g_object_set (G_OBJECT (sink), "location", filename, NULL);
    g_free (filename);
  }
#else
  {
    sink = gst_element_factory_make ("fakesink", NULL);
  }
#endif

  g_object_set (G_OBJECT (sink), "sync", FALSE, "async", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), wavenc, sink, NULL);

  sinkpad = gst_element_get_static_pad (wavenc, "sink");
  if ((ret = gst_pad_link (pad, sinkpad)) != GST_PAD_LINK_OK) {
    msg = g_strdup_printf ("Can not link pads (%d)", ret);
    gst_object_unref (sinkpad);
    goto failed;
  }
  gst_object_unref (sinkpad);

  if (!gst_element_link (wavenc, sink)) {
    msg = g_strdup_printf ("Can not link elements");
    goto failed;
  }

  sinkpad = gst_element_get_static_pad (sink, "sink");
  gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, buffer_probe_cb,
      NULL, NULL);
  gst_object_unref (sinkpad);

  gst_element_sync_state_with_parent (wavenc);
  gst_element_sync_state_with_parent (sink);

  G_LOCK (hash_mutex);
  g_hash_table_insert (hash, GST_OBJECT_NAME (pad), wavenc);
  G_UNLOCK (hash_mutex);

  return;

failed:
  gst_element_set_state (wavenc, GST_STATE_NULL);
  gst_element_set_state (sink, GST_STATE_NULL);
  gst_bin_remove_many (GST_BIN (pipeline), wavenc, sink, NULL);

  GST_ERROR ("Error %s", msg);
  fail (msg);
  g_free (msg);

  g_idle_add ((GSourceFunc) quit_main_loop, NULL);
}
/**
 * ges_timeline_pipeline_set_mode:
 * @pipeline: a #GESTimelinePipeline
 * @mode: the #GESPipelineFlags to use
 *
 * switches the @pipeline to the specified @mode. The default mode when
 * creating a #GESTimelinePipeline is #TIMELINE_MODE_PREVIEW.
 *
 * Note: The @pipeline will be set to #GST_STATE_NULL during this call due to
 * the internal changes that happen. The caller will therefore have to
 * set the @pipeline to the requested state after calling this method.
 *
 * Returns: %TRUE if the mode was properly set, else %FALSE.
 **/
gboolean
ges_timeline_pipeline_set_mode (GESTimelinePipeline * pipeline,
    GESPipelineFlags mode)
{
  GST_DEBUG_OBJECT (pipeline, "current mode : %d, mode : %d",
      pipeline->priv->mode, mode);

  /* fast-path, nothing to change */
  if (mode == pipeline->priv->mode)
    return TRUE;

  /* FIXME: It would be nice if we are only (de)activating preview
   * modes to not set the whole pipeline to NULL, but instead just
   * do the proper (un)linking to playsink. */

  /* Switch pipeline to NULL since we're changing the configuration */
  gst_element_set_state (GST_ELEMENT_CAST (pipeline), GST_STATE_NULL);

  /* remove no-longer needed components */
  if (pipeline->priv->mode & TIMELINE_MODE_PREVIEW &&
      !(mode & TIMELINE_MODE_PREVIEW)) {
    /* Disable playsink */
    GST_DEBUG ("Disabling playsink");
    g_object_ref (pipeline->priv->playsink);
    gst_bin_remove (GST_BIN_CAST (pipeline), pipeline->priv->playsink);
  }

  if ((pipeline->priv->mode &
          (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER)) &&
      !(mode & (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER))) {
    /* Disable render bin */
    GST_DEBUG ("Disabling rendering bin");
    g_object_ref (pipeline->priv->encodebin);
    g_object_ref (pipeline->priv->urisink);
    gst_bin_remove_many (GST_BIN_CAST (pipeline),
        pipeline->priv->encodebin, pipeline->priv->urisink, NULL);
  }

  /* Add new elements */
  if (!(pipeline->priv->mode & TIMELINE_MODE_PREVIEW) &&
      (mode & TIMELINE_MODE_PREVIEW)) {
    /* Add playsink */
    GST_DEBUG ("Adding playsink");

    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->playsink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add playsink");
      return FALSE;
    }
  }

  if (!(pipeline->priv->mode &
          (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER)) &&
      (mode & (TIMELINE_MODE_RENDER | TIMELINE_MODE_SMART_RENDER))) {
    /* Adding render bin */
    GST_DEBUG ("Adding render bin");

    if (G_UNLIKELY (pipeline->priv->urisink == NULL)) {
      GST_ERROR_OBJECT (pipeline, "Output URI not set !");
      return FALSE;
    }

    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->encodebin)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add encodebin");
      return FALSE;
    }
    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->urisink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add URI sink");
      return FALSE;
    }

    g_object_set (pipeline->priv->encodebin, "avoid-reencoding",
        !(!(mode & TIMELINE_MODE_SMART_RENDER)), NULL);

    gst_element_link_pads_full (pipeline->priv->encodebin, "src",
        pipeline->priv->urisink, "sink", GST_PAD_LINK_CHECK_NOTHING);
  }

  /* FIXUPS */
  /* FIXME
   * If we are rendering, set playsink to sync=False,
   * If we are NOT rendering, set playsink to sync=TRUE */

  pipeline->priv->mode = mode;

  return TRUE;
}
void FarsightChannel::CreateVideoInputElement(const QString &video_src_name)
{
    video_input_bin_ = gst_bin_new("video-input-bin");
    if (!video_input_bin_)
    {
        QString error_message("Cannot create bin for video input");
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstElement *scale = gst_element_factory_make("videoscale", NULL);
    if (!scale)
    {
        QString error_message = "Cannot create scale element for video input";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstElement *rate = gst_element_factory_make("videorate", NULL);
    if (!rate)
    {
        QString error_message = "Cannot create rate element for video input";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
    if (!colorspace)
    {
        QString error_message = "Cannot create colorspace element for video input";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstElement *capsfilter = gst_element_factory_make("capsfilter", NULL);
    if (!capsfilter)
    {
        QString error_message = "Cannot create capsfilter element for video input";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstCaps *caps = gst_caps_new_simple("video/x-raw-yuv",
    //GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb",
        "width", G_TYPE_INT, 320,
        "height", G_TYPE_INT, 240,
        NULL);
    g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);

    video_input_ = gst_element_factory_make(video_src_name.toStdString().c_str(), NULL);
    if (!video_input_)
    {
        QString error_message = "Cannot create video src element for video input";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }
    // QString test = gst_element_get_name(video_input_);

    gst_bin_add_many(GST_BIN(video_input_bin_), video_input_, scale, rate, colorspace, capsfilter, NULL);
    bool ok = gst_element_link_many(video_input_, scale, rate, colorspace, capsfilter, NULL);
    if (!ok)
    {
        QString error_message = "Cannot link: video_input_ ! scale ! rate ! colorspace ! capsfilter ";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    GstPad *src = gst_element_get_static_pad(capsfilter, "src");
    GstPad *ghost = gst_ghost_pad_new("src", src);
    if (!ghost || !src)
    {
        QString error_message = "Cannot create ghost pad for video_input_bin_";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }

    ok = gst_element_add_pad(GST_ELEMENT(video_input_bin_), ghost);
    if (!ok)
    {
        QString error_message = "Cannot add ghost pad to video_input_bin_";
        LogError(error_message.toStdString());
        throw(Exception(error_message.toStdString().c_str()));
    }
    gst_object_unref(G_OBJECT(src));
    gst_object_ref(video_input_bin_);
    gst_object_sink(video_input_bin_);

    video_tee_ = setUpElement("tee");

    if (GST_ELEMENT(locally_captured_video_playback_element_))
    {
        gst_bin_add_many(GST_BIN(pipeline_), video_input_bin_, video_tee_, locally_captured_video_playback_element_, NULL);
        ok = gst_element_link_many(video_input_bin_, video_tee_, locally_captured_video_playback_element_, NULL);
        if (!ok)
        {
            QString error_message = "Cannot link: video_input_bin_ ! video_tee_ ! locally_captured_video_playback_element_";
            LogError(error_message.toStdString());
            gst_bin_remove_many(GST_BIN(pipeline_), video_input_bin_, video_tee_, locally_captured_video_playback_element_, NULL);
            throw(Exception(error_message.toStdString().c_str()));
        }
    }
    else
    {
        QString error_message = "locally_captured_video_playback_element_ is NULL";
        LogError(error_message.toStdString());

        gst_bin_add_many(GST_BIN(pipeline_), video_input_bin_, video_tee_, NULL);
        ok = gst_element_link_many(video_input_bin_, video_tee_, NULL);
        if (!ok)
        {
            QString error_message = "Cannot link: video_input_bin_ ! video_tee_ ";
            LogError(error_message.toStdString());
            gst_bin_remove_many(GST_BIN(pipeline_), video_input_bin_, video_tee_, NULL);
            throw(Exception(error_message.toStdString().c_str()));
        }
    }
}
/**
 * ges_pipeline_set_mode:
 * @pipeline: a #GESPipeline
 * @mode: the #GESPipelineFlags to use
 *
 * switches the @pipeline to the specified @mode. The default mode when
 * creating a #GESPipeline is #GES_PIPELINE_MODE_PREVIEW.
 *
 * Note: The @pipeline will be set to #GST_STATE_NULL during this call due to
 * the internal changes that happen. The caller will therefore have to
 * set the @pipeline to the requested state after calling this method.
 *
 * Returns: %TRUE if the mode was properly set, else %FALSE.
 **/
gboolean
ges_pipeline_set_mode (GESPipeline * pipeline, GESPipelineFlags mode)
{
  GList *tmp;

  g_return_val_if_fail (GES_IS_PIPELINE (pipeline), FALSE);

  GST_DEBUG_OBJECT (pipeline, "current mode : %d, mode : %d",
      pipeline->priv->mode, mode);

  /* fast-path, nothing to change */
  if (mode == pipeline->priv->mode)
    return TRUE;

  /* FIXME: It would be nice if we are only (de)activating preview
   * modes to not set the whole pipeline to NULL, but instead just
   * do the proper (un)linking to playsink. */

  /* Switch pipeline to NULL since we're changing the configuration */
  gst_element_set_state (GST_ELEMENT_CAST (pipeline), GST_STATE_NULL);

  if (pipeline->priv->timeline) {
    gboolean disabled =
        ! !(mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER));

    for (tmp = pipeline->priv->timeline->tracks; tmp; tmp = tmp->next)
      track_disable_last_gap (GES_TRACK (tmp->data), disabled);
  }

  /* remove no-longer needed components */
  if (pipeline->priv->mode & GES_PIPELINE_MODE_PREVIEW &&
      !(mode & GES_PIPELINE_MODE_PREVIEW)) {
    /* Disable playsink */
    GST_DEBUG ("Disabling playsink");
    gst_object_ref (pipeline->priv->playsink);
    gst_bin_remove (GST_BIN_CAST (pipeline), pipeline->priv->playsink);
  }

  if ((pipeline->priv->mode &
          (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER)) &&
      !(mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER))) {
    GList *tmp;
    GstCaps *caps;

    for (tmp = pipeline->priv->timeline->tracks; tmp; tmp = tmp->next) {
      GESTrackType type = GES_TRACK (tmp->data)->type;

      if (type == GES_TRACK_TYPE_AUDIO)
        caps = gst_caps_new_empty_simple ("audio/x-raw");
      else if (type == GES_TRACK_TYPE_VIDEO)
        caps = gst_caps_new_empty_simple ("video/x-raw");
      else
        continue;

      ges_track_set_caps (GES_TRACK (tmp->data), caps);
      gst_caps_unref (caps);
    }

    /* Disable render bin */
    GST_DEBUG ("Disabling rendering bin");
    gst_object_ref (pipeline->priv->encodebin);
    gst_object_ref (pipeline->priv->urisink);
    gst_bin_remove_many (GST_BIN_CAST (pipeline),
        pipeline->priv->encodebin, pipeline->priv->urisink, NULL);
  }

  /* Add new elements */
  if (!(pipeline->priv->mode & GES_PIPELINE_MODE_PREVIEW) &&
      (mode & GES_PIPELINE_MODE_PREVIEW)) {
    /* Add playsink */
    GST_DEBUG ("Adding playsink");

    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->playsink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add playsink");
      return FALSE;
    }
  }

  if (!(pipeline->priv->mode &
          (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER)) &&
      (mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER))) {
    /* Adding render bin */
    GST_DEBUG ("Adding render bin");

    if (G_UNLIKELY (pipeline->priv->urisink == NULL)) {
      GST_ERROR_OBJECT (pipeline, "Output URI not set !");
      return FALSE;
    }

    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->encodebin)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add encodebin");
      return FALSE;
    }
    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->urisink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add URI sink");
      return FALSE;
    }

    g_object_set (pipeline->priv->encodebin, "avoid-reencoding",
        !(!(mode & GES_PIPELINE_MODE_SMART_RENDER)), NULL);

    gst_element_link_pads_full (pipeline->priv->encodebin, "src",
        pipeline->priv->urisink, "sink", GST_PAD_LINK_CHECK_NOTHING);
  }

  /* FIXUPS */
  /* FIXME
   * If we are rendering, set playsink to sync=False,
   * If we are NOT rendering, set playsink to sync=TRUE */

  pipeline->priv->mode = mode;

  return TRUE;
}
static void
remove_agnostic_bin (GstElement * agnosticbin)
{
  KmsAudioMixer *self;
  GstElement *audiorate = NULL, *typefind = NULL;
  GstPad *sinkpad, *peerpad;

  self = (KmsAudioMixer *) gst_element_get_parent (agnosticbin);
  if (self == NULL) {
    GST_WARNING_OBJECT (agnosticbin, "No parent element");
    return;
  }

  sinkpad = gst_element_get_static_pad (agnosticbin, "sink");
  peerpad = gst_pad_get_peer (sinkpad);
  if (peerpad == NULL) {
    GST_WARNING_OBJECT (sinkpad, "Not linked");
    gst_object_unref (sinkpad);
    goto end;
  }

  audiorate = gst_pad_get_parent_element (peerpad);
  gst_object_unref (sinkpad);
  gst_object_unref (peerpad);

  if (audiorate == NULL) {
    GST_WARNING_OBJECT (self, "No audiorate");
    goto end;
  }

  sinkpad = gst_element_get_static_pad (audiorate, "sink");
  peerpad = gst_pad_get_peer (sinkpad);
  if (peerpad == NULL) {
    GST_WARNING_OBJECT (sinkpad, "Not linked");
    gst_object_unref (sinkpad);
    goto end;
  }

  typefind = gst_pad_get_parent_element (peerpad);
  gst_object_unref (sinkpad);
  gst_object_unref (peerpad);

  if (typefind == NULL) {
    GST_WARNING_OBJECT (self, "No typefind");
    goto end;
  }

  gst_element_unlink_many (typefind, audiorate, agnosticbin, NULL);

  gst_element_set_locked_state (typefind, TRUE);
  gst_element_set_locked_state (audiorate, TRUE);
  gst_element_set_locked_state (agnosticbin, TRUE);

  gst_element_set_state (typefind, GST_STATE_NULL);
  gst_element_set_state (audiorate, GST_STATE_NULL);
  gst_element_set_state (agnosticbin, GST_STATE_NULL);

  gst_object_ref (agnosticbin);
  gst_bin_remove_many (GST_BIN (self), typefind, audiorate, agnosticbin, NULL);
  gst_object_unref (agnosticbin);

end:
  if (audiorate != NULL) {
    gst_object_unref (audiorate);
  }
  if (typefind != NULL) {
    gst_object_unref (typefind);
  }
  gst_object_unref (self);
}
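/*
 * Note (not from the sources above): remove_agnostic_bin() locks each
 * element's state before driving it to GST_STATE_NULL. A locked element is
 * skipped by its parent bin's state changes, so the bin cannot drag a
 * half-removed element back up while teardown is in flight. A minimal
 * sketch of that idiom, with a hypothetical element "el":
 */
static void
shutdown_and_remove (GstBin * bin, GstElement * el)
{
  gst_object_ref (el);                      /* outlive gst_bin_remove() */
  gst_element_set_locked_state (el, TRUE);  /* detach from bin state changes */
  gst_element_set_state (el, GST_STATE_NULL);
  gst_bin_remove (bin, el);
  gst_object_unref (el);
}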
// ----------------------------------------------------------------------------
// Import streams
int GStreamerImportFileHandle::Import(TrackFactory *trackFactory,
                                      Track ***outTracks,
                                      int *outNumTracks,
                                      Tags *tags)
{
   // Save track factory pointer
   mTrackFactory = trackFactory;

   // Create the progress dialog
   CreateProgress();

   // Block streams that are to be bypassed
   g_mutex_lock(&mStreamsLock);
   bool haveStreams = false;
   for (guint i = 0; i < mStreams->len; i++)
   {
      GStreamContext *c = (GStreamContext *) g_ptr_array_index(mStreams, i);

      // Did the user choose to skip this stream?
      if (!c->mUse)
      {
         // Get the audioconvert sink pad and unlink
         GstPad *convsink = gst_element_get_static_pad(c->mConv, "sink");
         GstPad *convpeer = gst_pad_get_peer(convsink);
         gst_pad_unlink(convpeer, convsink);
         gst_object_unref(convpeer);

         // Set bitbucket callbacks so the prerolled sample won't get processed
         // when we change the state to PLAYING
         gst_app_sink_set_callbacks(GST_APP_SINK(c->mSink), &AppSinkBitBucket, this, NULL);

         // Set state to playing for conv and sink so EOS gets processed
         gst_element_set_state(c->mConv, GST_STATE_PLAYING);
         gst_element_set_state(c->mSink, GST_STATE_PLAYING);

         // Send an EOS event to the pad to force them to drain
         gst_pad_send_event(convsink, gst_event_new_eos());

         // Resync state with pipeline
         gst_element_sync_state_with_parent(c->mConv);
         gst_element_sync_state_with_parent(c->mSink);

         // Done with the pad
         gst_object_unref(convsink);

         // Unlink audioconvert and appsink
         gst_element_unlink(c->mConv, c->mSink);

         // Remove them from the bin
         gst_bin_remove_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

         // All done with them
         c->mConv = NULL;
         c->mSink = NULL;

         continue;
      }

      // We have a stream to process
      haveStreams = true;
   }
   g_mutex_unlock(&mStreamsLock);

   // Can't do much if we don't have any streams to process
   if (!haveStreams)
   {
      wxMessageBox(wxT("File doesn't contain any audio streams."),
                   wxT("GStreamer Importer"));
      return eProgressFailed;
   }

   // Get the ball rolling...
   GstStateChangeReturn state = gst_element_set_state(mPipeline, GST_STATE_PLAYING);
   if (state == GST_STATE_CHANGE_FAILURE)
   {
      wxMessageBox(wxT("Unable to import file, state change failed."),
                   wxT("GStreamer Importer"));
      return eProgressFailed;
   }

   // Get the duration of the stream
   gint64 duration;
   gst_element_query_duration(mPipeline, GST_FORMAT_TIME, &duration);

   // Handle bus messages and update progress while the file is importing
   bool success = true;
   int updateResult = eProgressSuccess;
   while (ProcessBusMessage(success) && success && updateResult == eProgressSuccess)
   {
      gint64 position;

      // Update progress indicator and give user chance to abort
      if (gst_element_query_position(mPipeline, GST_FORMAT_TIME, &position))
      {
         updateResult = mProgress->Update((wxLongLong_t) position,
                                          (wxLongLong_t) duration);
      }
   }

   // Disable pipeline
   gst_element_set_state(mPipeline, GST_STATE_NULL);

   // Something bad happened
   if (!success || updateResult == eProgressFailed || updateResult == eProgressCancelled)
   {
      return updateResult;
   }

   // Grab the streams lock
   g_mutex_lock(&mStreamsLock);

   // Count the total number of tracks collected
   *outNumTracks = 0;
   for (guint s = 0; s < mStreams->len; s++)
   {
      GStreamContext *c = (GStreamContext *) g_ptr_array_index(mStreams, s);
      if (c->mChannels)
      {
         *outNumTracks += c->mNumChannels;
      }
   }

   // Create new tracks
   *outTracks = new Track *[*outNumTracks];

   // Copy audio from mChannels to newly created tracks (destroying mChannels in process)
   int trackindex = 0;
   for (guint s = 0; s < mStreams->len; s++)
   {
      GStreamContext *c = (GStreamContext *) g_ptr_array_index(mStreams, s);
      if (c->mChannels)
      {
         for (int ch = 0; ch < c->mNumChannels; ch++)
         {
            c->mChannels[ch]->Flush();
            (*outTracks)[trackindex++] = c->mChannels[ch];
         }

         delete [] c->mChannels;
         c->mChannels = NULL;
      }
   }
   g_mutex_unlock(&mStreamsLock);

   // Set any tags found in the stream
   *tags = mTags;

   return updateResult;
}
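/*
 * Note (distilled from the snippets above; "conv" and "sink" are
 * hypothetical elements): the teardown order every example shares.
 * gst_bin_remove_many() releases the bin's (parent) reference to each
 * element, so a caller that still needs an element afterwards must take
 * its own ref first; driving the removed elements to GST_STATE_NULL is
 * what actually frees their resources.
 */
static void
remove_chain (GstBin * bin, GstElement * conv, GstElement * sink)
{
  /* Keep the elements alive across gst_bin_remove_many(). */
  gst_object_ref (conv);
  gst_object_ref (sink);

  gst_element_unlink (conv, sink);
  gst_bin_remove_many (bin, conv, sink, NULL);

  /* NULL the removed elements, then drop our references. */
  gst_element_set_state (conv, GST_STATE_NULL);
  gst_element_set_state (sink, GST_STATE_NULL);

  gst_object_unref (conv);
  gst_object_unref (sink);
}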