void Pipeline::setVideoEffect(const QString &value) { Effect *newEffect = EffectManager::instance()->getEffect(value); // close valves g_object_set(effectValve, "drop", TRUE, NULL); // unlink current effect, remove and destroy it gst_element_unlink_many(effectCapsFilter, effect, effectPostCS, NULL); g_object_ref(effect); gst_bin_remove(GST_BIN(effectInternalBin), effect); gst_element_set_state(effect, GST_STATE_NULL); g_object_unref(GST_OBJECT(effect)); effect = gst_parse_bin_from_description(newEffect->desc().toUtf8(), TRUE, NULL); // add new effect to the bin and link it gst_bin_add(GST_BIN(effectInternalBin), effect); gst_element_link_many(effectCapsFilter, effect, effectPostCS, NULL); gst_element_set_state(effectInternalBin, GST_STATE_READY); gst_element_set_state(effectInternalBin, GST_STATE_PAUSED); //open valve g_object_set(effectValve, "drop", FALSE, NULL); }
void GStreamerGWorld::removePlatformVideoSink() { if (!m_dynamicPadName) return; // Get video sink bin and the elements to remove. GstElement* sinkPtr = 0; g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL); GRefPtr<GstElement> videoSink = adoptGRef(sinkPtr); GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee")); GRefPtr<GstElement> platformVideoSink = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink")); GRefPtr<GstElement> queue = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue")); GRefPtr<GstElement> colorspace = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace")); GRefPtr<GstElement> videoScale = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale")); GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get())); GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink")); gst_pad_unlink(srcPad.get(), sinkPad.get()); gst_element_release_request_pad(tee.get(), srcPad.get()); gst_element_unlink_many(queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL); gst_bin_remove_many(GST_BIN(videoSink.get()), queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL); gst_element_set_state(platformVideoSink.get(), GST_STATE_NULL); gst_element_set_state(videoScale.get(), GST_STATE_NULL); gst_element_set_state(colorspace.get(), GST_STATE_NULL); gst_element_set_state(queue.get(), GST_STATE_NULL); m_dynamicPadName.clear(); }
///////////////////////////////////////////////////////////////////////////////// // Unlinks all elements // bool CDFKAFU050::UnlinkPipeline() { #ifdef USE_AFU050 gst_element_set_state (_Pipeline.pipeline, GST_STATE_PAUSED); gst_element_set_state (_Pipeline.pipeline, GST_STATE_NULL); g_main_loop_quit (_Pipeline.loop); // Pipeline to the tee. Does the camera control and color image. gst_element_unlink_many(GetElement("appsrc"), GetElement("jpegdec"), GetElement("cogcolorspace"), GetElement("tee"), NULL); gst_element_unlink_many(GetElement("queue1"), GetElement("videoscale1"), GetElement("filter"), GetElement("ximagesink"), NULL); gst_element_unlink_many(GetElement("queue2"), GetElement("image_sink"), NULL); gst_pad_unlink(tee_q1_pad, q1_pad); gst_pad_unlink(tee_q2_pad, q2_pad); gst_object_unref(GetElement("appsrc")); gst_object_unref(GetElement("filter")); gst_object_unref(GetElement("jpegdec")); gst_object_unref(GetElement("tee")); gst_object_unref(GetElement("ximagesink")); gst_object_unref(GetElement("queue1")); gst_object_unref(GetElement("queue2")); gst_object_unref(GetElement("videoscale1")); gst_object_unref(GetElement("image_sink")); // clean up and delete the pipeline gst_object_unref (GST_OBJECT (_Pipeline.pipeline)); g_source_remove (_Pipeline.bus_watch_id); #endif return true; }
// Dismantle the fullscreen video branch (queue -> colorspace -> videoscale
// -> platform sink) that was attached to the tee inside the sink bin.
// No-op when no dynamic tee pad was ever requested.
void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    // Get video sink bin and the elements to remove.
    // NOTE(review): g_object_get() returns a new reference to a ref-counted
    // GstElement; holding it in GOwnPtr relies on a GstElement
    // specialization that releases via unref rather than free — confirm.
    GOwnPtr<GstElement> videoSink;
    g_object_get(m_pipeline, "video-sink", &videoSink.outPtr(), NULL);
    GstElement* tee = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee");
    GstElement* platformVideoSink = gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink");
    GstElement* queue = gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue");
    GstElement* colorspace = gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace");
    GstElement* videoScale = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale");

    // Get pads to unlink and remove.
    GstPad* srcPad = gst_element_get_static_pad(tee, m_dynamicPadName);
    GstPad* sinkPad = gst_element_get_static_pad(queue, "sink");

    // Block data flow towards the pipeline branch to remove. No need
    // for pad blocking if the pipeline is paused.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad, true)) {

        // Unlink and release request pad.
        gst_pad_unlink(srcPad, sinkPad);
        gst_element_release_request_pad(tee, srcPad);

        // Unlink, remove and cleanup queue, ffmpegcolorspace, videoScale and sink.
        gst_element_unlink_many(queue, colorspace, videoScale, platformVideoSink, NULL);
        gst_bin_remove_many(GST_BIN(videoSink.get()), queue, colorspace, videoScale, platformVideoSink, NULL);
        gst_element_set_state(platformVideoSink, GST_STATE_NULL);
        gst_element_set_state(videoScale, GST_STATE_NULL);
        gst_element_set_state(colorspace, GST_STATE_NULL);
        gst_element_set_state(queue, GST_STATE_NULL);
    }
    // NOTE(review): if pad blocking fails while PLAYING, the branch stays
    // linked but m_dynamicPadName is still reset below — verify intended.

    // Drop the references taken by gst_bin_get_by_name()/get_static_pad().
    gst_object_unref(GST_OBJECT(srcPad));
    gst_object_unref(GST_OBJECT(sinkPad));
    gst_object_unref(queue);
    gst_object_unref(colorspace);
    gst_object_unref(videoScale);
    gst_object_unref(platformVideoSink);
    gst_object_unref(tee);
    m_dynamicPadName = 0;
}
void GStreamerGWorld::exitFullscreen() { if (!m_dynamicPadName) return; // Get video sink bin and the elements to remove. GRefPtr<GstElement> videoSink; GstElement* sinkPtr = 0; g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL); videoSink = adoptGRef(sinkPtr); GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee")); GRefPtr<GstElement> platformVideoSink = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink")); GRefPtr<GstElement> queue = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue")); GRefPtr<GstElement> colorspace = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace")); GRefPtr<GstElement> videoScale = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale")); // Get pads to unlink and remove. GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get())); GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink")); // Block data flow towards the pipeline branch to remove. No need // for pad blocking if the pipeline is paused. GstState state; gst_element_get_state(m_pipeline, &state, 0, 0); if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad.get(), true)) { // Unlink and release request pad. gst_pad_unlink(srcPad.get(), sinkPad.get()); gst_element_release_request_pad(tee.get(), srcPad.get()); // Unlink, remove and cleanup queue, ffmpegcolorspace, videoScale and sink. gst_element_unlink_many(queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL); gst_bin_remove_many(GST_BIN(videoSink.get()), queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL); gst_element_set_state(platformVideoSink.get(), GST_STATE_NULL); gst_element_set_state(videoScale.get(), GST_STATE_NULL); gst_element_set_state(colorspace.get(), GST_STATE_NULL); gst_element_set_state(queue.get(), GST_STATE_NULL); } m_dynamicPadName.clear(); }
/* Remove an agnosticbin, together with the audiorate and typefind elements
 * feeding it, from the parent KmsAudioMixer. The upstream elements are found
 * by walking sink-pad peers; if any lookup fails the function warns and
 * cleans up whatever references it already holds. */
static void
remove_agnostic_bin (GstElement * agnosticbin)
{
  KmsAudioMixer *self;
  GstElement *audiorate = NULL, *typefind = NULL;
  GstPad *sinkpad, *peerpad;

  /* gst_element_get_parent() refs the parent; released at the end. */
  self = (KmsAudioMixer *) gst_element_get_parent (agnosticbin);

  if (self == NULL) {
    GST_WARNING_OBJECT (agnosticbin, "No parent element");
    return;
  }

  /* Walk upstream: agnosticbin's sink peer belongs to the audiorate. */
  sinkpad = gst_element_get_static_pad (agnosticbin, "sink");
  peerpad = gst_pad_get_peer (sinkpad);

  if (peerpad == NULL) {
    GST_WARNING_OBJECT (sinkpad, "Not linked");
    gst_object_unref (sinkpad);
    goto end;
  }

  audiorate = gst_pad_get_parent_element (peerpad);
  gst_object_unref (sinkpad);
  gst_object_unref (peerpad);

  if (audiorate == NULL) {
    GST_WARNING_OBJECT (self, "No audiorate");
    goto end;
  }

  /* One more hop upstream: audiorate's sink peer belongs to the typefind. */
  sinkpad = gst_element_get_static_pad (audiorate, "sink");
  peerpad = gst_pad_get_peer (sinkpad);

  if (peerpad == NULL) {
    GST_WARNING_OBJECT (sinkpad, "Not linked");
    gst_object_unref (sinkpad);
    goto end;
  }

  typefind = gst_pad_get_parent_element (peerpad);
  gst_object_unref (sinkpad);
  gst_object_unref (peerpad);

  if (typefind == NULL) {
    GST_WARNING_OBJECT (self, "No typefind");
    goto end;
  }

  /* Unlink the whole chain, lock the elements so the bin's state changes
   * no longer touch them, then drive each one to NULL. */
  gst_element_unlink_many (typefind, audiorate, agnosticbin, NULL);

  gst_element_set_locked_state (typefind, TRUE);
  gst_element_set_locked_state (audiorate, TRUE);
  gst_element_set_locked_state (agnosticbin, TRUE);

  gst_element_set_state (typefind, GST_STATE_NULL);
  gst_element_set_state (audiorate, GST_STATE_NULL);
  gst_element_set_state (agnosticbin, GST_STATE_NULL);

  /* Extra ref keeps agnosticbin (passed in borrowed) alive across
   * gst_bin_remove_many(), which drops the bin's ownership ref. */
  gst_object_ref (agnosticbin);
  gst_bin_remove_many (GST_BIN (self), typefind, audiorate, agnosticbin, NULL);
  gst_object_unref (agnosticbin);

end:
  /* Release the lookup references taken above, and the parent ref. */
  if (audiorate != NULL) {
    gst_object_unref (audiorate);
  }

  if (typefind != NULL) {
    gst_object_unref (typefind);
  }

  gst_object_unref (self);
}