/* Bus sync handler: hands each video sink that requests a window one of the
 * pre-created Clutter actors.  Runs on a streaming thread, hence the mutex
 * around the shared actor counter. */
static GstBusSyncReply
create_window (GstBus * bus, GstMessage * message, gpointer data)
{
  GstGLClutterActor **actor = (GstGLClutterActor **) data;
  static gint count = 0;        /* number of actors handed out so far */
  static GMutex mutex;          /* guards 'count' across streaming threads */

  // ignore anything but 'prepare-window-handle' element messages
  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT)
    return GST_BUS_PASS;

  if (!gst_is_video_overlay_prepare_window_handle_message (message))
    return GST_BUS_PASS;

  g_mutex_lock (&mutex);
  if (count < N_ACTORS) {
    g_message ("adding actor %d", count);
    /* the message source is the video sink that needs a window handle */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC
            (message)), actor[count]->win);
    /* actor creation must happen in the Clutter main thread, not here */
    clutter_threads_add_idle ((GSourceFunc) create_actor, actor[count]);
    count++;
  }
  g_mutex_unlock (&mutex);

  /* message handled: drop it (a DROP handler owns the reference) */
  gst_message_unref (message);
  return GST_BUS_DROP;
}
// Forward the native video window handle to the message's source sink so it
// renders into our window instead of creating its own.
void GStreamerGWorld::setWindowOverlay(GstMessage* message)
{
    GstObject* sink = GST_MESSAGE_SRC(message);

    // Only overlay-capable sinks can be embedded; bail out otherwise.
#ifndef GST_API_VERSION_1
    if (!GST_IS_X_OVERLAY(sink))
#else
    if (!GST_IS_VIDEO_OVERLAY(sink))
#endif
        return;

    // Preserve the video's aspect ratio when the sink supports the property.
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(sink), "force-aspect-ratio"))
        g_object_set(sink, "force-aspect-ratio", TRUE, NULL);

    if (m_videoWindow) {
        m_videoWindow->prepareForOverlay(message);

#ifndef GST_API_VERSION_1
        // gst_x_overlay_set_window_handle was introduced in -plugins-base
        // 0.10.31, just like the macro for checking the version.
#ifdef GST_CHECK_PLUGINS_BASE_VERSION
        gst_x_overlay_set_window_handle(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#else
        gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#endif
#else
        // GStreamer 1.x path.
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), m_videoWindow->videoWindowId());
#endif
    }
}
/* Bus sync handler: catch the video sink's window-handle request and point
 * it at the preview widget's native window (XID, or HWND on Windows). */
static GstBusSyncReply
create_window(GstBus *bus, GstMessage *msg, gpointer data)
{
    preview_t *preview = (preview_t*)data;

    switch (GST_MESSAGE_TYPE(msg))
    {
        case GST_MESSAGE_ELEMENT:
        {
#if GST_CHECK_VERSION(1, 0, 0)
            if (!gst_is_video_overlay_prepare_window_handle_message(msg))
                return GST_BUS_PASS;
            gst_video_overlay_set_window_handle(
                GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(msg)), preview->xid);
#else
            /* 0.10: the request arrives as a "prepare-xwindow-id" structure. */
            if (!gst_structure_has_name(msg->structure, "prepare-xwindow-id"))
                return GST_BUS_PASS;
#if !defined(_WIN32)
            gst_x_overlay_set_xwindow_id(
                GST_X_OVERLAY(GST_MESSAGE_SRC(msg)), preview->xid);
#else
            /* Windows builds use the directdraw sink's dedicated setter. */
            gst_directdraw_sink_set_window_id(
                GST_X_OVERLAY(GST_MESSAGE_SRC(msg)), preview->xid);
#endif
#endif
            /* Handled: drop the message (and the reference we now own). */
            gst_message_unref(msg);
            return GST_BUS_DROP;
        } break;

        default:
        {
        } break;
    }
    return GST_BUS_PASS;
}
/* Record the X window id on the player and, when a video overlay element is
 * already known, forward the handle to it immediately. */
void
gst_binding_set_xid (gstPlay *play, gulong xid)
{
  if (play == NULL)
    return;

  play->xid = xid;

  if (play->overlay == NULL)
    return;

  if (GST_IS_VIDEO_OVERLAY (play->overlay))
    gst_video_overlay_set_window_handle (play->overlay, xid);
}
/* GstVideoOverlay vmethod: forward the window handle to the pipeline's
 * internal playsink, which is the element actually rendering the video. */
static void
_overlay_set_window_handle (GstVideoOverlay * overlay, guintptr handle)
{
  GESPipeline *self = GES_PIPELINE (overlay);
  GstVideoOverlay *playsink_overlay = GST_VIDEO_OVERLAY (self->priv->playsink);

  gst_video_overlay_set_window_handle (playsink_overlay, handle);
}
/* Create (if needed) the X subwindow used for video output and attach it to
 * the GStreamer overlay sink.
 * Returns 0 on success, -1 on error, -3 for non-video streams (no window
 * needed, but the decoder is marked ready). */
int tsmf_window_create(TSMFGstreamerDecoder* decoder)
{
	struct X11Handle* hdl;

	/* Validate inputs before any dereference: the original code read
	 * decoder->media_type and decoder->outsink before the NULL checks. */
	if (!decoder)
		return -1;

	if (!decoder->platform)
		return -1;

	if (decoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
	{
		decoder->ready = TRUE;
		return -3;
	}
	else
	{
#if GST_VERSION_MAJOR > 0
		GstVideoOverlay *overlay = GST_VIDEO_OVERLAY(decoder->outsink);
#else
		GstXOverlay *overlay = GST_X_OVERLAY(decoder->outsink);
#endif
		hdl = (struct X11Handle*) decoder->platform;

		if (!hdl->subwin)
		{
			int event, error;
			hdl->subwin = XCreateSimpleWindow(hdl->disp, *(int *)hdl->xfwin,
			                                  0, 0, 1, 1, 0, 0, 0);

			if (!hdl->subwin)
			{
				/* Don't map/attach an invalid window like the original did. */
				WLog_ERR(TAG, "Could not create subwindow!");
				return -1;
			}

			XMapWindow(hdl->disp, hdl->subwin);
			XSync(hdl->disp, FALSE);
#if GST_VERSION_MAJOR > 0
			gst_video_overlay_set_window_handle(overlay, hdl->subwin);
#else
			gst_x_overlay_set_window_handle(overlay, hdl->subwin);
#endif
			decoder->ready = TRUE;
#if defined(WITH_XEXT)
			hdl->has_shape = XShapeQueryExtension(hdl->disp, &event, &error);
#endif
		}

		/* Let the sink translate X events (expose, resize) itself. */
#if GST_VERSION_MAJOR > 0
		gst_video_overlay_handle_events(overlay, TRUE);
#else
		gst_x_overlay_handle_events(overlay, TRUE);
#endif
		return 0;
	}
}
/* GLib bus watch: assigns the global X window to the video sink on demand,
 * and logs EOS / error / state-change / stream-status messages. */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  /* Window-handle requests are checked on every message, before the
   * message-type switch below. */
  if (gst_is_video_overlay_prepare_window_handle_message (msg)) {
    if (0 != g_video_xid) {
      GstVideoOverlay *overlay;
      // GST_MESSAGE_SRC (message) will be the video sink element
      overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (msg));
      gst_video_overlay_set_window_handle (overlay, g_video_xid);
    } else {
      g_warning ("Should have obtained video_window_handle by now!");
    }
  }

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("####################### Stream Ends\n");
      gtk_main_quit();
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      /* parse_error transfers ownership of both; free them after use */
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("####################### Error: %s\n", error->message);
      g_error_free (error);
      gtk_main_quit();
      break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
      GstState oldState;
      GstState newState;
      GstState pendingState;
      gst_message_parse_state_changed(msg, &oldState, &newState, &pendingState);
      g_printf("####################### oldState:%d, newState:%d, pendingState:%d!\n", oldState, newState, pendingState);
      break;
    }
    case GST_MESSAGE_STREAM_STATUS: {
      GstStreamStatusType statusType;
      GstElement* owner = NULL;
      gst_message_parse_stream_status(msg, &statusType, &owner);
      g_printf("####################### statusType:%d, owner:%p!\n", statusType, owner);
      break;
    }
    default:
      break;
  }

  /* Returning TRUE keeps this watch installed. */
  return TRUE;
}
/* Fetch the worker's "sink" element and point its video overlay at the XID
 * of the PTZ video view widget.  Returns FALSE if the sink is missing. */
static gboolean
gst_switch_ptz_prepare (GstSwitchPTZ * ptz)
{
  GstWorker *worker = GST_WORKER (ptz);
  GstElement *video_sink = gst_worker_get_element_unlocked (worker, "sink");
  gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (ptz->video_view));

  g_return_val_if_fail (GST_IS_ELEMENT (video_sink), FALSE);

  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (video_sink), xid);
  gst_object_unref (video_sink);

  return TRUE;
}
/* Attach the native window handle to the pipeline's video overlay sink.
 * Returns true on success; the returnb_assert macros bail out on failure. */
bool CGstPlayback::SetWindow(void *window)
{
    returnb_assert(m_playbin);

    g_print("%s, get GST_TYPE_VIDEO_OVERLAY", __func__);
    m_video_sink = gst_bin_get_by_interface(GST_BIN(m_playbin),
                                            GST_TYPE_VIDEO_OVERLAY);
    returnb_assert(m_video_sink);

    /* "%p" is the correct conversion for a pointer; the original "%x"
     * expects unsigned int and is undefined behavior on LP64 platforms. */
    g_print("%s, set native window=(%p)", __func__, window);
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(m_video_sink),
                                        (guintptr)window);
    return true;
}
/* Synchronous bus handler: when the video sink asks for a window handle,
 * hand it the GUI's native window and configure the render rectangle. */
static GstBusSyncReply
bus_sync_handler(GstBus *bus, GstMessage *msg, gpointer data)
{
    if (!gst_is_video_overlay_prepare_window_handle_message(msg))
        return GST_BUS_PASS;

    guintptr video_window_handle = gui_get_video_window_handle();
    g_assert(video_window_handle != 0);

    // GST_MESSAGE_SRC (message) will be the video sink element.
    video_window_overlay = GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(msg));
    gst_video_overlay_set_window_handle(video_window_overlay,
                                        video_window_handle);

    int x, y, w, h;
    gui_get_render_rectangle(&x, &y, &w, &h);
    gst_video_overlay_set_render_rectangle(video_window_overlay, x, y, w, h);

    /* A handler that returns GST_BUS_DROP owns the message reference and
     * must release it; the original leaked one message per request. */
    gst_message_unref(msg);
    return GST_BUS_DROP;
}
/* JNI callback: the Java surface is going away.  Detach the sink from it,
 * drop the pipeline to READY and release the cached native window. */
static void
gst_native_surface_finalize (JNIEnv *env, jobject thiz)
{
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data)
    return;

  GST_DEBUG ("Releasing Native Window %p", data->native_window);

  if (data->video_sink) {
    /* Clear the handle so the sink stops rendering to the dying surface,
     * then stop data flow by going back to READY. */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->video_sink),
        (guintptr) NULL);
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  }

  /* Guarded: ANativeWindow_release on a NULL window (surface never set,
   * or finalize delivered twice) would crash. */
  if (data->native_window)
    ANativeWindow_release (data->native_window);
  data->native_window = NULL;
  data->initialized = FALSE;
}
/* Synchronous bus watch: capture the video overlay element when it asks for
 * a window handle and hand it the stored XID. */
static GstBusSyncReply
gst_sync_watch (GstBus *bus, GstMessage *message, gpointer data)
{
  gstPlay *play = (gstPlay *) data;

  /* The original returned FALSE/TRUE here, but a GstBusSyncHandler must
   * return a GstBusSyncReply.  FALSE equals GST_BUS_DROP, which obliges
   * the handler to unref the message — not done here, so every message
   * leaked when play was NULL.  Pass messages through instead. */
  if (play == NULL)
    return GST_BUS_PASS;

  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    if (gst_is_video_overlay_prepare_window_handle_message (message)) {
      play->overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
      gst_video_overlay_set_window_handle (play->overlay, play->xid);
    }
  }

  /* Same numeric value as the original's TRUE (== GST_BUS_PASS). */
  return GST_BUS_PASS;
}
/**
 * @brief Attach the display's window handle to the worker's video sink.
 * @param disp The GstVideoDisp instance.
 * @return TRUE on success, FALSE when the "sink" element is missing.
 * @memberof GstVideoDisp
 */
static gboolean
gst_video_disp_prepare (GstVideoDisp * disp)
{
  GstWorker *worker = GST_WORKER (disp);
  GstElement *video_sink = gst_worker_get_element_unlocked (worker, "sink");

  g_return_val_if_fail (GST_IS_ELEMENT (video_sink), FALSE);

  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (video_sink),
      disp->handle);
  gst_object_unref (video_sink);

  return TRUE;
}
/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  /* Only fire once: requires a native window AND a running main loop. */
  if (!data->initialized && data->native_window && data->main_loop) {
    GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);

    /* The main loop is running and we received a native window, inform the sink about it */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->video_sink), (guintptr)data->native_window);

    /* Notify the Java side; clear any pending exception so the native
     * thread is not left in an undefined JNI state. */
    (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
    data->initialized = TRUE;
  }
}
/* GstVideoOverlay::set_window_handle implementation: delegate to the first
 * element inside the bin that itself implements GstVideoOverlay. */
static void
gst_gl_sink_bin_overlay_set_window_handle (GstVideoOverlay * overlay,
    guintptr handle)
{
  GstGLSinkBin *self = GST_GL_SINK_BIN (overlay);
  GstElement *inner;

  inner = gst_bin_get_by_interface (GST_BIN (self), GST_TYPE_VIDEO_OVERLAY);
  if (inner == NULL)
    return;

  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (inner), handle);
  gst_object_unref (inner);
}
/* This function is called when the GUI toolkit creates the physical window that will hold the video. * At this point we can retrieve its handler (which has a different meaning depending on the windowing system) * and pass it to GStreamer through the XOverlay interface. */ static void realize_cb (GtkWidget *widget, CustomData *data) { GdkWindow *window = gtk_widget_get_window (widget); guintptr window_handle; if (!gdk_window_ensure_native (window)) g_error ("Couldn't create native window needed for GstXOverlay!"); /* Retrieve window handler from GDK */ #if defined (GDK_WINDOWING_WIN32) window_handle = (guintptr)GDK_WINDOW_HWND (window); #elif defined (GDK_WINDOWING_QUARTZ) window_handle = gdk_quartz_window_get_nsview (window); #elif defined (GDK_WINDOWING_X11) window_handle = GDK_WINDOW_XID (window); #endif /* Pass it to playbin, which implements XOverlay and will forward it to the video sink */ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle); }
/* Synchronous bus callback: give the viewfinder sink the drawing area's XID
 * when it asks for a window handle.  Runs on a streaming thread. */
static GstBusSyncReply
bus_sync_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  GtkWidget *ui_drawing;

  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT)
    return GST_BUS_PASS;

  if (!gst_message_has_name (message, "prepare-window-handle"))
    return GST_BUS_PASS;

  /* FIXME: make sure to get XID in main thread */
  ui_drawing = GTK_WIDGET (gtk_builder_get_object (builder, "viewfinderArea"));
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (message->src),
      GDK_WINDOW_XID (gtk_widget_get_window (ui_drawing)));

  /* Handled: drop the message and release the reference we now own. */
  gst_message_unref (message);
  return GST_BUS_DROP;
}
/* GObject set_property handler.  Storing a window handle forwards it to the
 * video overlay element immediately when one is already known. */
static void
gst_player_video_overlay_video_renderer_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstPlayerVideoOverlayVideoRenderer *self =
      GST_PLAYER_VIDEO_OVERLAY_VIDEO_RENDERER (object);

  switch (prop_id) {
    case VIDEO_OVERLAY_VIDEO_RENDERER_PROP_WINDOW_HANDLE:
      self->window_handle = g_value_get_pointer (value);
      /* If the overlay exists already, apply the new handle now; otherwise
       * it is applied later, when the video sink is created. */
      if (self->video_overlay)
        gst_video_overlay_set_window_handle (self->video_overlay,
            (guintptr) self->window_handle);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
// Bus sync handler: attach the Qt widget's native window id to the video
// sink as soon as it requests one.
GstBusSyncReply Pipeline::create_window (GstBus* bus, GstMessage* message, const Pipeline* p)
{
    // Only 'prepare-window-handle' element messages are of interest.
    const bool isPrepareMessage =
        GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT
        && gst_is_video_overlay_prepare_window_handle_message (message);
    if (!isPrepareMessage)
        return GST_BUS_PASS;

    qDebug ("setting window handle");

    // Passing 0 as the window_handle would tell the overlay to stop using
    // that window and create an internal one.  In the directdrawsink's
    // gst_video_overlay_set_window_handle implementation, the handle
    // (parameter 2) is cast to HWND before it is used.
    GstVideoOverlay* overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
    gst_video_overlay_set_window_handle (overlay, (guintptr) p->winId ());

    // Returning DROP makes us the owner of the message reference.
    gst_message_unref (message);
    return GST_BUS_DROP;
}
// static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) { // if (g_strcmp0 (gst_structure_get_name (msg->structure), "tags-changed") == 0) { // /* If the message is the "tags-changed" (only one we are currently issuing), update // * the stream info GUI */ // analyze_streams (data); // } // } static GstBusSyncReply busSyncHandler (GstBus * bus, GstMessage * message, gpointer user_data) { // ignore anything but 'prepare-window-handle' element messages if (!gst_is_video_overlay_prepare_window_handle_message (message)) return GST_BUS_PASS; if (window_handle != 0) { GstVideoOverlay *overlay; // GST_MESSAGE_SRC (message) will be the video sink element overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)); gst_video_overlay_set_window_handle (overlay, window_handle); } else { g_warning ("Should have obtained video_window_handle by now!"); } gst_message_unref (message); return GST_BUS_DROP; }
/* GstPlayerVideoRenderer vmethod: adopt the player pipeline as the overlay
 * target and push the stored window handle to it.  Always returns NULL (no
 * custom sink element is supplied; playbin picks its own). */
static GstElement *
gst_player_video_overlay_video_renderer_create_video_sink
    (GstPlayerVideoRenderer * iface, GstPlayer * player)
{
  GstElement *video_overlay;
  GstPlayerVideoOverlayVideoRenderer *self =
      GST_PLAYER_VIDEO_OVERLAY_VIDEO_RENDERER (iface);

  if (self->video_overlay) {
    gst_object_unref (self->video_overlay);
    self->video_overlay = NULL;   /* avoid a dangling pointer on error paths */
  }

  /* gst_player_get_pipeline() returns a new reference (transfer full). */
  video_overlay = gst_player_get_pipeline (player);

  if (!GST_IS_VIDEO_OVERLAY (video_overlay)) {
    /* Release the reference instead of leaking it (the original's
     * g_return_val_if_fail returned without unreffing). */
    if (video_overlay)
      gst_object_unref (video_overlay);
    g_return_val_if_reached (NULL);
  }

  self->video_overlay = GST_VIDEO_OVERLAY (video_overlay);

  gst_video_overlay_set_window_handle (self->video_overlay,
      (guintptr) self->window_handle);

  return NULL;
}
/* "player-created" callback: embed playbin's video output into the given
 * GTK window. */
static void
player_created (G_GNUC_UNUSED SnraClient *client, GstElement *playbin,
    GtkWidget *window)
{
#if GST_CHECK_VERSION (0, 11, 1)
  /* 1.x / late 0.11: playbin proxies GstVideoOverlay, set the XID directly. */
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (playbin),
      GDK_WINDOW_XID (gtk_widget_get_window (window)));
#else
  /* Older GStreamer: wait for the window-handle request via a sync bus
   * handler, smuggling the XID through the handler's user data. */
  GstBus *bus;
  guintptr window_handle;

  window_handle = GDK_WINDOW_XID (gtk_widget_get_window (window));
  bus = gst_element_get_bus (playbin);
  gst_bus_set_sync_handler (bus, bus_sync_handler,
      GUINT_TO_POINTER (window_handle));
  gst_object_unref (bus);
#endif
  g_print ("Player created\n");
}
/* Bus sync handler: assign the viewer page's X window to the video sink.
 * (Note: the original comment said 'prepare-xwindow-id'; the code actually
 * checks for the 1.x 'prepare-window-handle' request.) */
static GstBusSyncReply
set_playbin_window (GstBus *bus, GstMessage *message, GthMediaViewerPage *self)
{
	/* ignore anything but 'prepare-window-handle' element messages */
	if (! gst_is_video_overlay_prepare_window_handle_message (message))
		return GST_BUS_PASS;

	if (self->priv->video_window_xid != 0) {
		GstVideoOverlay *video_overlay;

		video_overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
		gst_video_overlay_set_window_handle (video_overlay, self->priv->video_window_xid);
		self->priv->xwin_assigned = TRUE;
	}
	else
		g_warning ("Should have obtained video_window_xid by now!");

	/* Handled: drop the message and release the reference we now own. */
	gst_message_unref (message);
	return GST_BUS_DROP;
}
/* Build the base pipeline: an xvimagesink bound to the given X window id, a
 * two-input videomixer, and per-input queue/videoscale/capsfilter chains.
 * The chains are only added to the bin here; linking happens elsewhere.
 * Returns false when the mixer's sink pad template cannot be obtained.
 *
 * NOTE(review): the gst_element_factory_make() results are not NULL-checked,
 * and the request pads are unreffed but never released with
 * gst_element_release_request_pad() — confirm the intended pad lifetime. */
bool GstShow::init_pipeline(const int xwinid)
{
    pipeline = gst_pipeline_new ("xvoverlay");

    //create base pipeline elements
    videosink = gst_element_factory_make("xvimagesink", NULL);
    /* Render into the caller-supplied X window instead of a new one. */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (videosink), xwinid);

    mixer = gst_element_factory_make("videomixer", "mix");

    ///* Manually linking the videoboxes to the mixer */
    GstPadTemplate *mixer_sink_pad_template =
        gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(mixer), "sink_%u");
    if(mixer_sink_pad_template == NULL) {
        g_printerr("Could not get mixer pad template.\n");
        // gst_object_unref(something);
        return false;
    }

    GstPad* mixerpads[2];
    mixerpads[0] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL);
    mixerpads[1] = gst_element_request_pad(mixer, mixer_sink_pad_template, NULL, NULL);
    /* Place the two inputs side by side (second shifted 640px), both opaque. */
    g_object_set(mixerpads[0], "xpos", 0, NULL);
    g_object_set(mixerpads[0], "ypos", 0, NULL);
    g_object_set(mixerpads[0], "alpha",1.0, NULL);
    g_object_set(mixerpads[1], "xpos", 640, NULL);
    g_object_set(mixerpads[1], "ypos", 0, NULL);
    g_object_set(mixerpads[1], "alpha",1.0, NULL);
    gst_object_unref(mixerpads[0]);
    gst_object_unref(mixerpads[1]);

    // prepare queue and scale
    for (int i = 0; i<2; i++) {
        queue[i] = gst_element_factory_make("queue", NULL);
        scale[i] = gst_element_factory_make("videoscale", NULL);
        scalefilter[i] = gst_element_factory_make("capsfilter", NULL);
        /* Force each input to 640x480 before it reaches the mixer. */
        GstCaps *caps = gst_caps_new_simple("video/x-raw",
                            "width", G_TYPE_INT, 640,
                            "height", G_TYPE_INT, 480,
                            //"format", G_TYPE_STRING, "BGR",
                            NULL);
        caps = gst_caps_fixate(caps);
        g_object_set(G_OBJECT(scalefilter[i]), "caps", caps, NULL);
        gst_caps_unref(caps);
    }

    gst_bin_add_many(GST_BIN(pipeline),
                     queue[0], queue[1],
                     scale[0], scale[1],
                     scalefilter[0], scalefilter[1],
                     mixer, videosink, NULL);

    return true;
}
/* Demo entry point: builds videotestsrc ! xvimagesink, embeds the sink into
 * a GTK drawing area via its XID, and animates the overlay render rectangle
 * from a 50ms timeout. */
gint
main (gint argc, gchar ** argv)
{
  GdkWindow *video_window_xwindow;
  GtkWidget *window, *video_window;
  GstElement *pipeline, *src, *sink;
  GstStateChangeReturn sret;
  gulong embed_xid = 0;
  gboolean force_aspect = FALSE, draw_borders = FALSE;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  /* Simple flag parsing: -a aspect ratio, -b borders, -v verbose. */
  if (argc) {
    gint arg;
    for (arg = 0; arg < argc; arg++) {
      if (!strcmp (argv[arg], "-a"))
        force_aspect = TRUE;
      else if (!strcmp (argv[arg], "-b"))
        draw_borders = TRUE;
      else if (!strcmp (argv[arg], "-v"))
        verbose = TRUE;
    }
  }

  /* prepare the pipeline */
  pipeline = gst_pipeline_new ("xvoverlay");
  src = gst_element_factory_make ("videotestsrc", NULL);
  sink = gst_element_factory_make ("xvimagesink", NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

  /* The app draws and handles events itself, so disable them on the sink. */
  g_object_set (G_OBJECT (sink), "handle-events", FALSE,
      "force-aspect-ratio", force_aspect, "draw-borders", draw_borders, NULL);

  /* prepare the ui */
  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (window_closed), (gpointer) pipeline);
  gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);
  video_window = gtk_drawing_area_new ();
  gtk_widget_set_double_buffered (video_window, FALSE);
  gtk_container_add (GTK_CONTAINER (window), video_window);

  /* show the gui and play */
  gtk_widget_show_all (window);

  /* realize window now so that the video window gets created and we can
   * obtain its XID before the pipeline is started up and the videosink
   * asks for the XID of the window to render onto */
  gtk_widget_realize (window);

  video_window_xwindow = gtk_widget_get_window (video_window);
  embed_xid = GDK_WINDOW_XID (video_window_xwindow);
  if (verbose) {
    g_print ("Window realize: got XID %lu\n", embed_xid);
  }

  /* we know what the video sink is in this case (xvimagesink), so we can
   * just set it directly here now (instead of waiting for a
   * prepare-window-handle element message in a sync bus handler and setting
   * it there) */
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), embed_xid);

  /* Seed the animation state: widget size 320x240, angle 0, small phase
   * step so the rectangle completes a cycle over 200 ticks. */
  anim_state.overlay = GST_VIDEO_OVERLAY (sink);
  anim_state.widget = video_window;
  anim_state.w = 320;
  anim_state.h = 240;
  anim_state.a = 0.0;
  anim_state.p = (G_PI + G_PI) / 200.0;

  handle_resize_cb (video_window, NULL, sink);
  g_signal_connect (video_window, "configure-event",
      G_CALLBACK (handle_resize_cb), NULL);
  g_signal_connect (video_window, "draw",
      G_CALLBACK (handle_draw_cb), NULL);

  /* Animate the render rectangle every 50ms. */
  g_timeout_add (50, (GSourceFunc) animate_render_rect, NULL);

  /* run the pipeline */
  sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (sret == GST_STATE_CHANGE_FAILURE)
    gst_element_set_state (pipeline, GST_STATE_NULL);
  else {
    anim_state.running = TRUE;
    gtk_main ();
  }

  gst_object_unref (pipeline);
  return 0;
}
/* Build (on first call) and return the renderer bin:
 *   ghost "sink" -> videorate -> videoscale -> videoconvert -> capsfilter
 *   -> videobalance -> queue -> VIDEO_SINK
 * Guarded by the renderer lock so concurrent callers share one bin. */
static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer)
{
    OwrVideoRenderer *video_renderer;
    OwrVideoRendererPrivate *priv;
    GstElement *videorate, *videoscale, *videoconvert, *capsfilter, *balance, *queue, *sink;
    GstCaps *filter_caps;
    GstPad *ghostpad, *sinkpad;
    gint fps_n = 0, fps_d = 1;
    gchar *bin_name;

    g_assert(renderer);

    video_renderer = OWR_VIDEO_RENDERER(renderer);
    priv = video_renderer->priv;

    g_mutex_lock(&priv->video_renderer_lock);

    /* Already built on a previous call: return the cached bin. */
    if (priv->renderer_bin)
        goto done;

    /* Unique name so several renderer bins can coexist in the pipeline. */
    bin_name = g_strdup_printf("video-renderer-bin-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->renderer_bin = gst_bin_new(bin_name);
    g_free(bin_name);

    gst_bin_add(GST_BIN(_owr_get_pipeline()), priv->renderer_bin);
    gst_element_sync_state_with_parent(GST_ELEMENT(priv->renderer_bin));

    videorate = gst_element_factory_make("videorate", "video-renderer-rate");
    g_object_set(videorate, "drop-only", TRUE, NULL);
    videoscale = gst_element_factory_make("videoscale", "video-renderer-scale");
    videoconvert = gst_element_factory_make(VIDEO_CONVERT, "video-renderer-convert");
    gst_util_double_to_fraction(priv->max_framerate, &fps_n, &fps_d);
    capsfilter = gst_element_factory_make("capsfilter", "video-renderer-capsfilter");

    /* Constrain output only by the dimensions/framerate actually configured
     * (unset values leave the corresponding caps field open). */
    filter_caps = gst_caps_new_empty_simple("video/x-raw");
    if (priv->width > 0)
        gst_caps_set_simple(filter_caps, "width", G_TYPE_INT, priv->width, NULL);
    if (priv->height > 0)
        gst_caps_set_simple(filter_caps, "height", G_TYPE_INT, priv->height, NULL);
    if (fps_n > 0 && fps_d > 0)
        gst_caps_set_simple(filter_caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
    g_object_set(capsfilter, "caps", filter_caps, NULL);

    /* The renderer's "disabled" property is applied live via videobalance. */
    balance = gst_element_factory_make("videobalance", "video-renderer-balance");
    g_signal_connect_object(renderer, "notify::disabled", G_CALLBACK(renderer_disabled), balance, 0);
    renderer_disabled(renderer, NULL, balance);

    queue = gst_element_factory_make("queue", "video-renderer-queue");
    g_assert(queue);
    g_object_set(queue, "max-size-buffers", 3, "max-size-bytes", 0, "max-size-time", 0, NULL);

    sink = gst_element_factory_make(VIDEO_SINK, "video-renderer-sink");
    g_assert(sink);
    if (GST_IS_VIDEO_OVERLAY(sink))
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), priv->window_handle);
    /* async false is needed when using live sources to not require prerolling
     * as prerolling is not possible from live sources in GStreamer */
    g_object_set(sink, "async", FALSE, NULL);

    gst_bin_add_many(GST_BIN(priv->renderer_bin), videorate, videoscale,
        videoconvert, capsfilter, balance, queue, sink, NULL);
    /* Linked back-to-front, downstream first. */
    LINK_ELEMENTS(queue, sink);
    LINK_ELEMENTS(balance, queue);
    LINK_ELEMENTS(capsfilter, balance);
    LINK_ELEMENTS(videoconvert, capsfilter);
    LINK_ELEMENTS(videoscale, videoconvert);
    LINK_ELEMENTS(videorate, videoscale);

    /* Expose the head of the chain as the bin's "sink" ghost pad. */
    sinkpad = gst_element_get_static_pad(videorate, "sink");
    g_assert(sinkpad);
    ghostpad = gst_ghost_pad_new("sink", sinkpad);
    gst_pad_set_active(ghostpad, TRUE);
    gst_element_add_pad(priv->renderer_bin, ghostpad);
    gst_object_unref(sinkpad);

    /* Sync each element's state with the bin, sink first. */
    gst_element_sync_state_with_parent(sink);
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(balance);
    gst_element_sync_state_with_parent(capsfilter);
    gst_element_sync_state_with_parent(videoconvert);
    gst_element_sync_state_with_parent(videoscale);
    gst_element_sync_state_with_parent(videorate);

done:
    g_mutex_unlock(&priv->video_renderer_lock);
    return priv->renderer_bin;
}