void VideoPlayerBackend::clear()
{
    if (m_sink)
        g_object_unref(m_sink);

    if (m_pipeline)
    {
        qDebug("gstreamer: Destroying pipeline");

        /* Disable the message handlers to avoid anything calling back into this instance */
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
        Q_ASSERT(bus);
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);

        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        /* Ensure the transition to NULL completes before unreffing */
        gst_element_get_state(m_pipeline, 0, 0, GST_CLOCK_TIME_NONE);
        gst_object_unref(GST_OBJECT(m_pipeline));
    }

    m_pipeline = m_videoLink = m_sink = 0;
    setVideoBuffer(0);

    m_state = Stopped;
    m_errorMessage.clear();
}
bool MplVideoPlayerBackend::start(const QUrl &url)
{
    qDebug() << "MplVideoPlayerBackend::start url =" << url << "\n";

    if (state() == PermanentError)
        return false;

    if (m_wid.isNull() || m_wid.isEmpty())
    {
        setError(false, tr("Window id is not set"));
        return false;
    }

    if (m_mplayer)
        m_mplayer->deleteLater();

    createMplayerProcess();

    /* Buffered HTTP source */
    setVideoBuffer(new VideoHttpBuffer(url));
    m_videoBuffer->startBuffering();

    m_playbackSpeed = 1.0;

    qDebug() << "MplVideoPlayerBackend::start mplayer process started\n";

    return true;
}
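/* createMplayerProcess() is called above but its body is not part of this section.
 * Below is a minimal sketch of what such a helper might look like, assuming m_mplayer
 * is a QProcess* and that the backend drives mplayer in slave mode, rendering into the
 * window given by m_wid. The slot names (mplayerReadyRead, mplayerFinished) and the
 * argument list are assumptions for illustration, not the actual implementation. */
void MplVideoPlayerBackend::createMplayerProcess()
{
    m_mplayer = new QProcess(this);

    /* Forward process output and termination back into the backend (slot names assumed) */
    connect(m_mplayer, SIGNAL(readyReadStandardOutput()), this, SLOT(mplayerReadyRead()));
    connect(m_mplayer, SIGNAL(finished(int,QProcess::ExitStatus)), this, SLOT(mplayerFinished()));

    /* -slave: accept commands on stdin; -idle: stay alive between files;
     * -wid: render into our window */
    QStringList args;
    args << QLatin1String("-slave")
         << QLatin1String("-idle")
         << QLatin1String("-wid") << m_wid;
    m_mplayer->start(QLatin1String("mplayer"), args);
}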
void GstVideoPlayerBackend::clear()
{
    if (m_sink)
        g_object_unref(m_sink);

    /* The stream doesn't support audio. The audio elements have been unlinked, but they
     * are still in the PAUSED state. Set their states to NULL to avoid warnings when they
     * are disposed. */
    if (!m_hasAudio && m_audioDecoder)
    {
        gst_element_set_state(m_audioDecoder, GST_STATE_NULL);
        gst_element_set_state(m_audioQueue, GST_STATE_NULL);
        gst_element_set_state(m_audioLink, GST_STATE_NULL);
        gst_element_set_state(m_audioResample, GST_STATE_NULL);
        gst_element_set_state(m_volumeController, GST_STATE_NULL);
        gst_element_set_state(m_audioSink, GST_STATE_NULL);
    }

    if (m_pipeline)
    {
        qDebug("gstreamer: Destroying pipeline");

        /* Disable the message handlers to avoid anything calling back into this instance */
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
        Q_ASSERT(bus);
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);

        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        /* Ensure the transition to NULL completes before unreffing */
        gst_element_get_state(m_pipeline, 0, 0, GST_CLOCK_TIME_NONE);
        gst_object_unref(GST_OBJECT(m_pipeline));
    }

    m_pipeline = m_videoLink = m_sink = m_audioLink = 0;
    m_audioDecoder = m_audioQueue = m_audioResample = m_volumeController = m_audioSink = 0;
    m_hasAudio = false;
    setVideoBuffer(0);

    m_state = Stopped;
    m_errorMessage.clear();
}
void MplVideoPlayerBackend::clear()
{
    if (m_mplayer)
    {
#ifdef Q_OS_MAC
        delete m_mplayer;
#else
        m_mplayer->deleteLater();
#endif
        m_mplayer = 0;
    }

    setVideoBuffer(0);

    m_state = Stopped;
    m_errorMessage.clear();
}
bool GstVideoPlayerBackend::start(const QUrl &url)
{
    Q_ASSERT(!m_pipeline);
    if (state() == PermanentError || m_pipeline)
        return false;

    if (!m_sink)
    {
        setError(true, QLatin1String("Internal error: improper usage"));
        return false;
    }

    /* Pipeline */
    m_pipeline = gst_pipeline_new("stream");
    if (!m_pipeline)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("stream")));
        return false;
    }

    // TODO: uncomment when vaapi download starts working
    // enableFactory("vaapidecode", m_useHardwareDecoding);
    enableFactory("vaapidecode", false);

    /* Buffered HTTP source */
    setVideoBuffer(new VideoHttpBuffer(url));

    GstElement *source = m_videoBuffer->setupSrcElement(m_pipeline);
    if (!source)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("source")));
        setVideoBuffer(0);
        return false;
    }

    m_videoBuffer->startBuffering();

    GstElement *demuxer = gst_element_factory_make("matroskademux", "avi-demuxer");
    g_signal_connect(demuxer, "pad-added", G_CALLBACK(staticDemuxerPadReady), this);
    g_signal_connect(demuxer, "no-more-pads", G_CALLBACK(staticDemuxerNoMorePads), this);

    gst_bin_add(GST_BIN(m_pipeline), demuxer);

    if (!gst_element_link(source, demuxer))
    {
        setError(true, tr("Failed to create pipeline (%1)").arg(QLatin1String("demuxer")));
        return false;
    }

    if (!setupVideoPipeline() || !setupAudioPipeline())
        return false;

    m_playbackSpeed = 1.0;

    /* We handle all messages in the sync handler, because we can't run a glib event loop.
     * Although linux does use glib's loop (and we could take advantage of that), it's better
     * to handle everything this way for windows and mac support. */
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    Q_ASSERT(bus);
    gst_bus_enable_sync_message_emission(bus);
    gst_bus_set_sync_handler(bus, staticBusHandler, this);
    gst_object_unref(bus);

    /* Move the pipeline into the READY state. This call may block for a very long time
     * (up to several seconds), because it will not return until the pipeline has completed
     * that transition. */
    gst_element_set_state(m_pipeline, GST_STATE_READY);

    return true;
}
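/* staticDemuxerPadReady and staticDemuxerNoMorePads are connected above but not shown in
 * this section. Below is a minimal sketch of a typical GStreamer 0.10 pad-added trampoline
 * and handler, assuming the caps prefix ("video/" vs "audio/") decides which branch the pad
 * feeds and that m_videoLink/m_audioLink are the heads of the branches built by
 * setupVideoPipeline()/setupAudioPipeline(). The exact logic is an assumption. */
void GstVideoPlayerBackend::staticDemuxerPadReady(GstElement *element, GstPad *pad, gpointer userData)
{
    Q_ASSERT(userData);
    static_cast<GstVideoPlayerBackend*>(userData)->demuxerPadReady(element, pad);
}

void GstVideoPlayerBackend::demuxerPadReady(GstElement *element, GstPad *pad)
{
    Q_UNUSED(element);

    /* Decide where this pad goes by inspecting its caps */
    GstCaps *caps = gst_pad_get_caps(pad);
    const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    GstElement *target = g_str_has_prefix(name, "video/") ? m_videoLink : m_audioLink;
    gst_caps_unref(caps);

    if (!target)
        return;

    GstPad *sinkPad = gst_element_get_static_pad(target, "sink");
    if (GST_PAD_LINK_FAILED(gst_pad_link(pad, sinkPad)))
        qDebug("gstreamer: Failed to link demuxer pad");
    gst_object_unref(sinkPad);
}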
bool VideoPlayerBackend::start(const QUrl &url)
{
    Q_ASSERT(!m_pipeline);
    if (state() == PermanentError || m_pipeline)
        return false;

    if (!m_sink)
    {
        setError(true, QLatin1String("Internal error: improper usage"));
        return false;
    }

    /* Pipeline */
    m_pipeline = gst_pipeline_new("stream");
    if (!m_pipeline)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("stream")));
        return false;
    }

    /* Buffered HTTP source */
    setVideoBuffer(new VideoHttpBuffer(url));

    GstElement *source = m_videoBuffer->setupSrcElement(m_pipeline);
    if (!source)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("source")));
        setVideoBuffer(0);
        return false;
    }

    m_videoBuffer->startBuffering();

    /* Decoder */
    GstElement *decoder = gst_element_factory_make("decodebin2", "decoder");
    if (!decoder)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("decoder")));
        return false;
    }

    g_object_set(G_OBJECT(decoder),
                 "use-buffering", TRUE,
                 "max-size-time", 10 * GST_SECOND,
                 NULL);

    g_signal_connect(decoder, "new-decoded-pad", G_CALLBACK(staticDecodePadReady), this);

    /* Colorspace conversion (no-op if unnecessary) */
    GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    if (!colorspace)
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("colorspace")));
        return false;
    }

    gst_bin_add_many(GST_BIN(m_pipeline), decoder, colorspace, m_sink, NULL);

    if (!gst_element_link(source, decoder))
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("link decoder")));
        return false;
    }

    if (!gst_element_link(colorspace, m_sink))
    {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("link sink")));
        return false;
    }

    /* This is the element that is linked to the decoder for video output; it will be linked
     * when decodePadReady gives us the video pad. */
    m_videoLink = colorspace;

    m_playbackSpeed = 1.0;

    /* We handle all messages in the sync handler, because we can't run a glib event loop.
     * Although linux does use glib's loop (and we could take advantage of that), it's better
     * to handle everything this way for windows and mac support. */
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    Q_ASSERT(bus);
    gst_bus_enable_sync_message_emission(bus);
    gst_bus_set_sync_handler(bus, staticBusHandler, this);
    gst_object_unref(bus);

    /* Move the pipeline into the READY state. This call may block for a very long time
     * (up to several seconds), because it will not return until the pipeline has completed
     * that transition. */
    gst_element_set_state(m_pipeline, GST_STATE_READY);

    return true;
}
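/* staticDecodePadReady is connected to decodebin2's "new-decoded-pad" signal above (in
 * GStreamer 0.10 that signal also carries a gboolean "last" argument). Below is a minimal
 * sketch of the handler, assuming it only links newly decoded video pads to m_videoLink
 * (the ffmpegcolorspace element set up in start()); the body is an illustration, not the
 * actual method. */
void VideoPlayerBackend::staticDecodePadReady(GstElement *element, GstPad *pad, gboolean last, gpointer userData)
{
    Q_UNUSED(last);
    Q_ASSERT(userData);
    static_cast<VideoPlayerBackend*>(userData)->decodePadReady(element, pad);
}

void VideoPlayerBackend::decodePadReady(GstElement *element, GstPad *pad)
{
    Q_UNUSED(element);

    /* Only video pads are interesting; audio and everything else is ignored here */
    GstCaps *caps = gst_pad_get_caps(pad);
    const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
    bool isVideo = g_str_has_prefix(name, "video/");
    gst_caps_unref(caps);

    if (!isVideo || !m_videoLink)
        return;

    /* Link the decoder's new source pad to the colorspace element saved in m_videoLink */
    GstPad *sinkPad = gst_element_get_static_pad(m_videoLink, "sink");
    if (GST_PAD_LINK_FAILED(gst_pad_link(pad, sinkPad)))
        qDebug("gstreamer: Failed to link decoded video pad");
    gst_object_unref(sinkPad);
}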