Code Example #1
File: refpointertest.cpp Project: xohm/qt-gstreamer
void RefPointerTest::refTest1()
{
    GstObject *bin = GST_OBJECT(gst_object_ref(GST_OBJECT(gst_bin_new(NULL))));
    gst_object_sink(bin);
    QGst::ObjectPtr object = QGst::ObjectPtr::wrap(bin, false);
    QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin), 1);
}
Code Example #2
File: x11renderer.cpp Project: FilipBE/qtextended
GstElement* X11Renderer::createVideoSink()
{
    GstElement *videoSink = gst_element_factory_make ("xvimagesink", NULL);
    if (videoSink) {
        // Check if the xv sink is usable
        if (gst_element_set_state(videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
            gst_object_unref(GST_OBJECT(videoSink));
            videoSink = 0;
        } else {
            // Note that this should not really be necessary as these are
            // default values, though under certain conditions values are retained
            // even between application instances. (reproducible on 0.10.16/Gutsy)
            g_object_set(G_OBJECT(videoSink), "brightness", 0, NULL);
            g_object_set(G_OBJECT(videoSink), "contrast", 0, NULL);
            g_object_set(G_OBJECT(videoSink), "hue", 0, NULL);
            g_object_set(G_OBJECT(videoSink), "saturation", 0, NULL);
        }
    }

    if (!videoSink)
        videoSink = gst_element_factory_make ("ximagesink", NULL);

    gst_object_ref (GST_OBJECT (videoSink)); //Take ownership
    gst_object_sink (GST_OBJECT (videoSink));

    return videoSink;
}
Code Example #3
void QGstreamerVideoWidgetControl::createVideoWidget()
{
    if (m_widget)
        return;

    m_widget = new QGstreamerVideoWidget;

    m_widget->installEventFilter(this);
    m_windowId = m_widget->winId();

    m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
    if (m_videoSink) {
        // Check if the xv sink is usable
        if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
            gst_object_unref(GST_OBJECT(m_videoSink));
            m_videoSink = 0;
        } else {
            gst_element_set_state(m_videoSink, GST_STATE_NULL);

            g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
        }
    }

    if (!m_videoSink)
        m_videoSink = gst_element_factory_make ("ximagesink", NULL);

    gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
    gst_object_sink (GST_OBJECT (m_videoSink));


}
Code Example #4
File: FarsightChannel.cpp Project: caocao/naali
 GstElement* FarsightChannel::setUpElement(const QString &element_name)
 {
     GstElement* element = gst_element_factory_make(element_name.toStdString().c_str(), NULL);
     gst_object_ref(element);
     gst_object_sink(element);
     return element;
 }
Code Example #5
template <> GstPadTemplate* refGPtr<GstPadTemplate>(GstPadTemplate* ptr)
{
    if (ptr) {
        gst_object_ref(GST_OBJECT(ptr));
        gst_object_sink(GST_OBJECT(ptr));
    }
    return ptr;
}
Code Example #6
File: GStreamerVersioning.cpp Project: dog-god/iptv
void webkitGstObjectRefSink(GstObject* gstObject)
{
#ifdef GST_API_VERSION_1
    gst_object_ref_sink(gstObject);
#else
    gst_object_ref(gstObject);
    gst_object_sink(gstObject);
#endif
}
Code Example #7
template <> GstElement* refGPtr<GstElement>(GstElement* ptr)
{
    if (ptr) {
        gst_object_ref(GST_OBJECT(ptr));
        gst_object_sink(GST_OBJECT(ptr));
    }

    return ptr;
}
Code Example #8
template <> GstTask* refGPtr<GstTask>(GstTask* ptr)
{
    if (ptr) {
        gst_object_ref(GST_OBJECT(ptr));
        gst_object_sink(GST_OBJECT(ptr));
    }

    return ptr;
}
Code Example #9
File: rbgst-object.c Project: benolee/ruby-gnome2
void
rbgst_object_initialize(VALUE obj, gpointer cobj)
{
    if (GST_OBJECT_IS_FLOATING(cobj)) {
        gst_object_ref(cobj);
        gst_object_sink(cobj);
    }

    rbgobj_gobject_initialize(obj, cobj);
}
Code Example #10
GstElement *QGstreamerVideoRenderer::videoSink()
{
    if (!m_videoSink && m_surface) {
        m_videoSink = QVideoSurfaceGstSink::createSink(m_surface);
        gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
        gst_object_sink(GST_OBJECT(m_videoSink));
    }

    return reinterpret_cast<GstElement*>(m_videoSink);
}
Code Example #11
File: rbgst-object.c Project: benolee/ruby-gnome2
VALUE
rbgst_object_instance2robj(gpointer instance)
{
    if (GST_OBJECT_IS_FLOATING(instance)) {
        gst_object_ref(instance);
        gst_object_sink(instance);
    }

    return rbgobj_get_ruby_object_from_gobject(instance, TRUE);
}
Code Example #12
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);
  GstElement *colorspace, *videoscale, *sink;
  GstPad *pad;

  priv->videosink = gst_bin_new (NULL);

  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");

  sink = gst_element_factory_make ("gconfvideosink", NULL);
  g_assert (sink != NULL);

  videoscale = gst_element_factory_make ("videoscale", NULL);
  g_assert (videoscale != NULL);

  g_object_set (videoscale, "qos", FALSE, NULL);

  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  g_assert (colorspace != NULL);

  g_object_set (colorspace, "qos", FALSE, NULL);

  gst_bin_add_many (GST_BIN (priv->videosink), colorspace, videoscale,
    sink, NULL);

  if (!gst_element_link (colorspace, videoscale))
    g_error ("Failed to link ffmpegcolorspace and videoscale");

  if (!gst_element_link (videoscale, sink))
    g_error ("Failed to link videoscale and gconfvideosink");

  pad = gst_element_get_static_pad (colorspace, "sink");
  g_assert (pad != NULL);

  priv->sink_pad = gst_ghost_pad_new ("sink", pad);
  if (!gst_element_add_pad  (priv->videosink, priv->sink_pad))
    g_error ("Couldn't add sink ghostpad to the bin");

  gst_object_unref (pad);

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));
  gst_bus_enable_sync_message_emission (priv->bus);

  g_signal_connect (priv->bus, "sync-message",
    G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
    priv->min_height);
}
Code Example #13
File: FarsightChannel.cpp Project: caocao/naali
    void FarsightChannel::CreateAudioPlaybackElement()
    {
        audio_playback_bin_ = gst_bin_new("audio-output-bin");
        if (audio_playback_bin_ == 0)
            throw Exception("Cannot create GStreamer bin for audio playback.");

        fake_audio_output_ = setUpElement("fakesink");
        if (fake_audio_output_ == 0)
            throw Exception("Cannot create GStreamer fake audio output element.");
        else
        {
            g_signal_connect(fake_audio_output_, "handoff", G_CALLBACK(&FarsightChannel::OnFakeSinkHandoff), this);
            g_object_set(G_OBJECT(fake_audio_output_), "signal-handoffs", TRUE, NULL);
        }

        // audio modifications
        audio_resample_ = gst_element_factory_make("audioresample", NULL);
        if (audio_resample_ == 0)
            throw Exception("Cannot create GStreamer audio resample element.");

        audio_capsfilter_ = gst_element_factory_make("capsfilter", NULL);
        GstCaps *audio_caps = gst_caps_new_simple("audio/x-raw-int",
            "channels", G_TYPE_INT, 1,
            "width", G_TYPE_INT, 16,
//            "depth", G_TYPE_INT, 16,
            "rate", G_TYPE_INT, 16000,
            "signed", G_TYPE_BOOLEAN, true,
//            "endianess", G_TYPE_INT, 1234,
            NULL);
        g_object_set(G_OBJECT(audio_capsfilter_), "caps", audio_caps, NULL);

        //audio_convert_ = gst_element_factory_make("audioconvert", NULL);
        //if (audio_convert_ == 0)
        //    throw Exception("Cannot create GStreamer audio convert element.");

        gst_bin_add_many(GST_BIN(audio_playback_bin_), audio_resample_, fake_audio_output_, NULL);
        gboolean ok = gst_element_link_many(audio_resample_, fake_audio_output_, NULL);
        if (!ok)
        {
            QString error_message = "Cannot link elements for audio playback bin.";
            LogError(error_message.toStdString());
            throw Exception(error_message.toStdString().c_str());
        }

        // add ghost pad to audio_bin_
        GstPad *sink = gst_element_get_static_pad(audio_resample_, "sink");
        audio_playback_bin_sink_pad_ = gst_ghost_pad_new("sink", sink);
        gst_element_add_pad(GST_ELEMENT(audio_playback_bin_), audio_playback_bin_sink_pad_);
        gst_object_unref(G_OBJECT(sink));
        gst_object_ref(audio_playback_bin_);
        gst_object_sink(audio_playback_bin_);
    }
Code Example #14
File: rb-player-gst.c Project: wangd/rhythmbox
static gboolean
impl_remove_tee (RBPlayerGstTee *player, GstElement *element)
{
	RBPlayerGst *mp = RB_PLAYER_GST (player);

	if (mp->priv->tee == NULL) {
		gst_object_sink (element);
		mp->priv->waiting_tees = g_list_remove (mp->priv->waiting_tees, element);
		return TRUE;
	}

	return rb_gst_remove_tee (RB_PLAYER (mp), mp->priv->tee, element, need_pad_blocking (mp));
}
Code Example #15
File: rbcluttergst.c Project: orospakr/rbclutter
void
rbcltgst_initialize_gst_object (VALUE obj, gpointer gstobj)
{
    /* Grab the floating reference if the object is a subclass of
       GstObject */
    if (GST_IS_OBJECT (gstobj))
    {
        gst_object_ref (gstobj);
        gst_object_sink (gstobj);
    }

    G_INITIALIZE (obj, gstobj);
}
Code Example #16
File: rb-player-gst.c Project: wangd/rhythmbox
static gboolean
impl_remove_filter (RBPlayerGstFilter *player, GstElement *element)
{
	RBPlayerGst *mp = RB_PLAYER_GST (player);

	if (mp->priv->filterbin == NULL) {
		gst_object_sink (element);
		mp->priv->waiting_filters = g_list_remove (mp->priv->waiting_filters, element);
		return TRUE;
	}

	return rb_gst_remove_filter (RB_PLAYER (mp), mp->priv->filterbin, element, need_pad_blocking (mp));
}
Code Example #17
static void
empathy_video_widget_init (EmpathyVideoWidget *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);
  GstElement *colorspace, *videoscale;
  ClutterActor *texture;
  GstPad *pad;

  texture = create_clutter_texture (object);

  priv->videosink = gst_bin_new (NULL);

  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");

  priv->sink = clutter_gst_video_sink_new (CLUTTER_TEXTURE (texture));
  g_object_unref (G_OBJECT (texture));
  g_assert (priv->sink != NULL);

  videoscale = gst_element_factory_make ("videoscale", NULL);
  g_assert (videoscale != NULL);

  g_object_set (videoscale, "qos", FALSE, NULL);

  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  g_assert (colorspace != NULL);

  g_object_set (colorspace, "qos", FALSE, NULL);

  /* keep a reference so we can set its "sync" or "async" properties */
  gst_object_ref (priv->sink);
  gst_bin_add_many (GST_BIN (priv->videosink), colorspace, videoscale,
    priv->sink, NULL);

  if (!gst_element_link (colorspace, videoscale))
    g_error ("Failed to link ffmpegcolorspace and videoscale");

  if (!gst_element_link (videoscale, priv->sink))
    g_error ("Failed to link videoscale and gconfvideosink");

  pad = gst_element_get_static_pad (colorspace, "sink");
  g_assert (pad != NULL);

  priv->sink_pad = gst_ghost_pad_new ("sink", pad);
  if (!gst_element_add_pad  (priv->videosink, priv->sink_pad))
    g_error ("Couldn't add sink ghostpad to the bin");

  gst_object_unref (pad);
}
Code Example #18
MediaNode::MediaNode(Backend *backend, NodeDescription description) :
        m_isValid(false),
        m_root(0),
        m_audioTee(0),
        m_videoTee(0),
        m_fakeAudioSink(0),
        m_fakeVideoSink(0),
        m_backend(backend),
        m_description(description)
{
    if ((description & AudioSink) && (description & VideoSink)) {
        Q_ASSERT(0); // A node cannot accept both audio and video
    }

    if (description & AudioSource) {
        m_audioTee = gst_element_factory_make("tee", NULL);
        gst_object_ref (GST_OBJECT (m_audioTee));
        gst_object_sink (GST_OBJECT (m_audioTee));     

        // Fake audio sink to swallow unconnected audio pads
        m_fakeAudioSink = gst_element_factory_make("fakesink", NULL);
        g_object_set (G_OBJECT (m_fakeAudioSink), "sync", TRUE, NULL);
        gst_object_ref (GST_OBJECT (m_fakeAudioSink));
        gst_object_sink (GST_OBJECT (m_fakeAudioSink));
    }

    if (description & VideoSource) {
        m_videoTee = gst_element_factory_make("tee", NULL);
        gst_object_ref (GST_OBJECT (m_videoTee));
        gst_object_sink (GST_OBJECT (m_videoTee));     

        // Fake video sink to swallow unconnected video pads
        m_fakeVideoSink = gst_element_factory_make("fakesink", NULL);
        g_object_set (G_OBJECT (m_fakeVideoSink), "sync", TRUE, NULL);
        gst_object_ref (GST_OBJECT (m_fakeVideoSink));
        gst_object_sink (GST_OBJECT (m_fakeVideoSink));
    }
}
Code Example #19
File: refpointertest.cpp Project: xohm/qt-gstreamer
void RefPointerTest::dynamicCastDownObjectTest()
{
    GstObject *bin = GST_OBJECT(gst_object_ref(gst_bin_new(NULL)));
    gst_object_sink(bin);

    {
        QGlib::ObjectPtr object = QGlib::ObjectPtr::wrap(G_OBJECT(bin));
        QVERIFY(!object.dynamicCast<QGst::Object>().isNull());
        QVERIFY(!object.dynamicCast<QGst::Bin>().isNull());
        QVERIFY(object.dynamicCast<QGst::Pipeline>().isNull());
    }

    gst_object_unref(bin);
}
Code Example #20
File: refpointertest.cpp Project: xohm/qt-gstreamer
void RefPointerTest::dynamicCastUpObjectTest()
{
    GstBin *bin = GST_BIN(gst_object_ref(gst_bin_new(NULL)));
    gst_object_sink(bin);

    {
        QGst::BinPtr object = QGst::BinPtr::wrap(bin);
        QVERIFY(!object.dynamicCast<QGst::Element>().isNull());
        QVERIFY(!object.dynamicCast<QGlib::Object>().isNull());
        QVERIFY(!object.dynamicCast<QGst::ChildProxy>().isNull());
    }

    gst_object_unref(bin);
}
Code Example #21
QGstreamerVideoOverlay::QGstreamerVideoOverlay(QObject *parent)
    : QVideoWindowControl(parent)
    , m_surface(new QX11VideoSurface)
    , m_videoSink(reinterpret_cast<GstElement*>(QVideoSurfaceGstSink::createSink(m_surface)))
    , m_aspectRatioMode(Qt::KeepAspectRatio)
    , m_fullScreen(false)
{
    if (m_videoSink) {
        gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
        gst_object_sink(GST_OBJECT(m_videoSink));
    }

    connect(m_surface, SIGNAL(surfaceFormatChanged(QVideoSurfaceFormat)),
            this, SLOT(surfaceFormatChanged()));
}
Code Example #22
File: refpointertest.cpp Project: xohm/qt-gstreamer
void RefPointerTest::refTest2()
{
    GstObject *bin = GST_OBJECT(gst_object_ref(GST_OBJECT(gst_bin_new(NULL))));
    gst_object_sink(bin);
    {
        QGst::ObjectPtr object = QGst::ObjectPtr::wrap(bin);
        QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin), 2);
        {
            QGst::ObjectPtr object2 = object;
            QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin), 3);
        }
    }
    QCOMPARE(GST_OBJECT_REFCOUNT_VALUE(bin), 1);
    gst_object_unref(bin);
}
Code Example #23
File: glrenderer.cpp Project: phen89/rtqt
GLRenderer::GLRenderer(VideoWidget* videoWidget) :
    AbstractRenderer(videoWidget)
    , m_glWindow(0)
{
    videoWidget->backend()->logMessage("Creating OpenGL renderer");
    QGLFormat format = QGLFormat::defaultFormat();
    format.setSwapInterval(1);    // Enable vertical sync on draw to avoid tearing
    m_glWindow = new GLRenderWidgetImplementation(videoWidget, format);

    if ((m_videoSink = m_glWindow->createVideoSink())) {
        gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
        gst_object_sink (GST_OBJECT (m_videoSink));

        QWidgetVideoSinkBase*  sink = reinterpret_cast<QWidgetVideoSinkBase*>(m_videoSink);
        // Let the videosink know which widget to direct frame updates to
        sink->renderWidget = videoWidget;
    }
}
Code Example #24
File: empathy-video-widget.c Project: Elleo/empathy
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);

  priv->videosink = gst_element_factory_make ("gconfvideosink", NULL);
  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));
  gst_bus_enable_sync_message_emission (priv->bus);

  g_signal_connect (priv->bus, "sync-message",
    G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
    priv->min_height);
}
Code Example #25
File: qgstreamervideowindow.cpp Project: xjohncz/qt5
QGstreamerVideoWindow::QGstreamerVideoWindow(QObject *parent, const char *elementName)
    : QVideoWindowControl(parent)
    , m_videoSink(0)
    , m_windowId(0)
    , m_aspectRatioMode(Qt::KeepAspectRatio)
    , m_fullScreen(false)
    , m_colorKey(QColor::Invalid)
{
    if (elementName)
        m_videoSink = gst_element_factory_make(elementName, NULL);
    else
        m_videoSink = gst_element_factory_make("xvimagesink", NULL);

    if (m_videoSink) {
        gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
        gst_object_sink(GST_OBJECT(m_videoSink));

        GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
        m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
    }
}
Code Example #26
GstElement *QGstreamerGLTextureRenderer::videoSink()
{
    if (!m_videoSink && isReady()) {
        if (m_context && !m_surface->supportedPixelFormats(EGLImageTextureHandle).isEmpty()) {
#ifdef GL_TEXTURE_SINK_DEBUG
            qDebug() << Q_FUNC_INFO << ": using gltexture sink";
#endif
            if (m_context)
                m_context->makeCurrent();
            m_videoSink = gst_element_factory_make("gltexturesink", "egl-texture-sink");
            g_object_set(G_OBJECT(m_videoSink),
                         "x-display", QX11Info::display(),
                         "egl-display", eglGetDisplay((EGLNativeDisplayType)QX11Info::display()),
                         "egl-context", m_glEnabled ? eglGetCurrentContext()
                                                    : EGL_NO_CONTEXT,
                         "colorkey", m_colorKey.rgb(),
                         "autopaint-colorkey", false,
                         "use-framebuffer-memory", true,
                         "render-mode", m_overlayEnabled ? VIDEO_RENDERSWITCH_XOVERLAY_MODE
                                                         : VIDEO_RENDERSWITCH_TEXTURE_STREAMING_MODE,
                         (char*)NULL);

            g_signal_connect(G_OBJECT(m_videoSink), "frame-ready", G_CALLBACK(handleFrameReady), (gpointer)this);
        } else {
            qWarning() << Q_FUNC_INFO << ": Fallback to QVideoSurfaceGstSink since EGLImageTextureHandle is not supported";
            m_videoSink = reinterpret_cast<GstElement*>(QVideoSurfaceGstSink::createSink(m_surface));
        }

        if (m_videoSink) {
            gst_object_ref(GST_OBJECT(m_videoSink)); //Take ownership
            gst_object_sink(GST_OBJECT(m_videoSink));

            GstPad *pad = gst_element_get_static_pad(m_videoSink,"sink");
            m_bufferProbeId = gst_pad_add_buffer_probe(pad, G_CALLBACK(padBufferProbe), this);
        }
    }

    return m_videoSink;
}
Code Example #27
File: widgetrenderer.cpp Project: FilipBE/qtextended
WidgetRenderer::WidgetRenderer(VideoWidget *videoWidget)
        : AbstractRenderer(videoWidget)
        , m_width(0)
        , m_height(0)
{
    videoWidget->backend()->logMessage("Creating QWidget renderer");
    if ((m_videoSink = GST_ELEMENT(g_object_new(get_type_RGB(), NULL)))) {
        gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
        gst_object_sink (GST_OBJECT (m_videoSink));
        
        QWidgetVideoSinkBase*  sink = reinterpret_cast<QWidgetVideoSinkBase*>(m_videoSink);
        // Let the videosink know which widget to direct frame updates to
        sink->renderWidget = videoWidget;
    }

    // Clear the background with black by default
    QPalette palette;
    palette.setColor(QPalette::Background, Qt::black);
    m_videoWidget->setPalette(palette);
    m_videoWidget->setAutoFillBackground(true);
    m_videoWidget->setAttribute(Qt::WA_NoSystemBackground, false);
    m_videoWidget->setAttribute(Qt::WA_PaintOnScreen, false);
}
Code Example #28
void QGstreamerVideoWidgetControl::createVideoWidget()
{
    if (m_widget)
        return;

    m_widget = new QGstreamerVideoWidget;

    m_widget->installEventFilter(this);
    m_windowId = m_widget->winId();

    m_videoSink = gst_element_factory_make ("xvimagesink", NULL);
    if (m_videoSink) {
        // Check if the xv sink is usable
        if (gst_element_set_state(m_videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
            gst_object_unref(GST_OBJECT(m_videoSink));
            m_videoSink = 0;
        } else {
            gst_element_set_state(m_videoSink, GST_STATE_NULL);

            g_object_set(G_OBJECT(m_videoSink), "force-aspect-ratio", 1, (const char*)NULL);
#ifdef Q_WS_MAEMO_5
            //the overlay xvideo adapter fails to switch winId,
            //use "SGX Textured Video" adapter instead
            g_object_set(G_OBJECT(m_videoSink), "device", "1", NULL);
#endif
        }
    }

    if (!m_videoSink)
        m_videoSink = gst_element_factory_make ("ximagesink", NULL);

    gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
    gst_object_sink (GST_OBJECT (m_videoSink));


}
Code Example #29
File: VideoWidget.cpp Project: caocao/naali
    VideoWidget::VideoWidget(GstBus *bus,  QWidget *parent, const QString &name, const QString &video_sink_name) 
        : Communication::VideoPlaybackWidgetInterface(parent), 
          bus_((GstBus *) gst_object_ref(bus)), 
          video_overlay_(0), 
          video_playback_element_(0),
          video_playback_bin_(0),
          name_(name),
          window_id_(0),
          on_element_added_g_signal_(0),
          on_sync_message_g_signal_(0)

    {
        qDebug() << "VideoWidget " << name << " INIT STARTED";
        setWindowTitle(name);

        // Element notifier init
        notifier_ = fs_element_added_notifier_new();
        on_element_added_g_signal_ = g_signal_connect(notifier_, "element-added", G_CALLBACK(&VideoWidget::OnElementAdded), this);

// UNIX -> autovideosink
       
#ifdef Q_WS_X11

        qt_x11_set_global_double_buffer(false);

        //video_playback_element_ = gst_element_factory_make(video_sink_name.toStdString().c_str(), 0);
        //gst_object_ref(video_playback_element_);
        //gst_object_sink(video_playback_element_);
        //fs_element_added_notifier_add(notifier_, GST_BIN(video_playback_element_));

#endif
        // WINDOWS -> autovideosink will choose one of these: glimagesink (best), directdrawsink (possible buffer errors), dshowvideosink (possible buffer errors)
        // X11 -> 
        video_playback_element_ = gst_element_factory_make(video_sink_name.toStdString().c_str(), 0);
        if (!video_playback_element_)
        {
            qDebug() << "VideoWidget " << name << " CANNOT CREATE video_playback_element_ (" << video_sink_name <<")";
            return;
        }

        // Video bin init
        const QString video_bin_name = "video_bin_for_" + name;
        video_playback_bin_ = gst_bin_new(video_bin_name.toStdString().c_str());
        if (!video_playback_bin_)
        {
            qDebug() << "VideoWidget " << name << " CANNOT CREATE video_bin_";
            return;
        }

        // Add playback element to video bin
        gst_bin_add(GST_BIN(video_playback_bin_), video_playback_element_);

        // Pad inits
        GstPad *static_sink_pad = gst_element_get_static_pad(video_playback_element_, "sink");
        GstPad *sink_ghost_pad = gst_ghost_pad_new("sink", static_sink_pad);

        // Add pad to video bin
        gst_element_add_pad(GST_ELEMENT(video_playback_bin_), sink_ghost_pad);
        gst_object_unref(G_OBJECT(static_sink_pad));
        gst_object_ref(video_playback_bin_);
        gst_object_sink(video_playback_bin_);

        fs_element_added_notifier_add(notifier_, GST_BIN(video_playback_bin_));

        gst_bus_enable_sync_message_emission(bus_);
        on_sync_message_g_signal_ = g_signal_connect(bus_, "sync-message", G_CALLBACK(&VideoWidget::OnSyncMessage), this);

        qDebug() << "VideoWidget " << name << " INIT COMPLETE";

        // QWidget properties
        QPalette palette;
        palette.setColor(QPalette::Background, Qt::black);
        palette.setColor(QPalette::Window, Qt::black);
        setPalette(palette);
        
        // Show nothing and lets put qwidgets as normal external windows
        //setAutoFillBackground(true);
        //setAttribute(Qt::WA_NoSystemBackground, true);
        //setAttribute(Qt::WA_PaintOnScreen, true);

        setWindowFlags(Qt::Dialog);
        resize(VIDEO_WIDTH, VIDEO_HEIGHT);
        setMinimumSize(VIDEO_WIDTH, VIDEO_HEIGHT);
    }
Code Example #30
static void
gst_camerabin_image_dispose (GstCameraBinImage * img)
{
  GST_DEBUG_OBJECT (img, "disposing");

  g_string_free (img->filename, TRUE);
  img->filename = NULL;

  if (img->elements) {
    g_list_free (img->elements);
    img->elements = NULL;
  }

  if (img->sink) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->sink), GST_OBJECT_REFCOUNT_VALUE (img->sink));
    gst_object_unref (img->sink);
    img->sink = NULL;
  }

  if (img->formatter) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->formatter),
        GST_OBJECT_REFCOUNT_VALUE (img->formatter));
    gst_object_unref (img->formatter);
    img->formatter = NULL;
  }

  if (img->app_formatter) {
    gst_object_sink (img->app_formatter);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->app_formatter),
        GST_OBJECT_REFCOUNT_VALUE (img->app_formatter));
    gst_object_unref (img->app_formatter);
    img->app_formatter = NULL;
  }

  if (img->enc) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->enc), GST_OBJECT_REFCOUNT_VALUE (img->enc));
    gst_object_unref (img->enc);
    img->enc = NULL;
  }

  if (img->csp) {
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->csp), GST_OBJECT_REFCOUNT_VALUE (img->csp));
    gst_object_unref (img->csp);
    img->csp = NULL;
  }

  /* Note: if imagebin was never set to READY state the
     ownership of elements created by application were never
     taken by bin and therefore gst_object_sink is called for
     these elements (they may still be in floating state
     and not unreffed properly without sinking first)
   */
  if (img->app_enc) {
    gst_object_sink (img->app_enc);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->app_enc),
        GST_OBJECT_REFCOUNT_VALUE (img->app_enc));
    gst_object_unref (img->app_enc);
    img->app_enc = NULL;
  }

  if (img->post) {
    gst_object_sink (img->post);
    GST_LOG_OBJECT (img, "disposing %s with refcount %d",
        GST_ELEMENT_NAME (img->post), GST_OBJECT_REFCOUNT_VALUE (img->post));
    gst_object_unref (img->post);
    img->post = NULL;
  }

  G_OBJECT_CLASS (parent_class)->dispose ((GObject *) img);
}