/* Create the playbin pipeline for @self (no-op if it already exists),
 * apply the saved volume / aspect-ratio settings, and wire up the bus
 * (sync handler for window embedding + async "message" signal watch). */
static void
create_playbin (GthMediaViewerPage *self)
{
	GSettings *settings;
	GstBus    *bus;

	if (self->priv->playbin != NULL)
		return;

	self->priv->playbin = gst_element_factory_make ("playbin", "playbin");

	settings = g_settings_new (GTHUMB_GSTREAMER_TOOLS_SCHEMA);
	g_object_set (self->priv->playbin,
		      /* stored as an integer percentage; playbin expects a double */
		      "volume", (double) g_settings_get_int (settings, PREF_GSTREAMER_TOOLS_VOLUME) / 100.0,
		      "force-aspect-ratio", TRUE,
		      NULL);
	g_object_unref (settings);

	bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->playbin));
	gst_bus_enable_sync_message_emission (bus);
	gst_bus_set_sync_handler (bus, (GstBusSyncHandler) set_playbin_window, self, NULL);
	gst_bus_add_signal_watch (bus);

	g_signal_connect (self->priv->playbin,
			  "notify::volume",
			  G_CALLBACK (playbin_notify_volume_cb),
			  self);
	g_signal_connect (bus,
			  "message",
			  G_CALLBACK (bus_message_cb),
			  self);

	/* BUGFIX: gst_pipeline_get_bus() returns a new reference; the pipeline
	 * and the signal watch keep the bus alive, so release ours here to
	 * avoid leaking one reference per playbin creation. */
	gst_object_unref (bus);
}
/**
 * gst_validate_pipeline_monitor_new:
 * @pipeline: (transfer none): a #GstPipeline to run Validate on
 * @runner: the #GstValidateRunner reporting issues for the monitor
 * @parent: (allow-none): the parent #GstValidateMonitor, if any
 *
 * Returns: (transfer full): a monitor tracking @pipeline, or %NULL if the
 * monitored object could not be set up.
 */
GstValidatePipelineMonitor *
gst_validate_pipeline_monitor_new (GstPipeline * pipeline,
    GstValidateRunner * runner, GstValidateMonitor * parent)
{
  GstBus *bus;
  const gchar *type_name;
  GstValidatePipelineMonitor *monitor =
      g_object_new (GST_TYPE_VALIDATE_PIPELINE_MONITOR,
      "object", pipeline,
      "validate-runner", runner,
      "validate-parent", parent,
      "pipeline", pipeline, NULL);

  if (GST_VALIDATE_MONITOR_GET_OBJECT (monitor) == NULL) {
    g_object_unref (monitor);
    return NULL;
  }

  gst_validate_pipeline_monitor_create_scenarios (GST_VALIDATE_BIN_MONITOR
      (monitor));

  /* Listen to every bus message synchronously, from the posting thread. */
  bus = gst_element_get_bus (GST_ELEMENT (pipeline));
  gst_bus_enable_sync_message_emission (bus);
  g_signal_connect (bus, "sync-message", (GCallback) _bus_handler, monitor);
  gst_object_unref (bus);

  /* Remember which playbin flavour (if any) we are monitoring. */
  type_name = G_OBJECT_TYPE_NAME (pipeline);
  if (g_strcmp0 (type_name, "GstPlayBin") == 0)
    monitor->is_playbin = TRUE;
  else if (g_strcmp0 (type_name, "GstPlayBin3") == 0)
    monitor->is_playbin3 = TRUE;

  return monitor;
}
int main (int argc, char *argv[]) { GstElement *bin, *fakesrc, *fakesink; GstBus *bus; GstStateChangeReturn ret; gst_init (&argc, &argv); /* create a new bin to hold the elements */ bin = gst_pipeline_new ("pipeline"); g_assert (bin); /* create a source */ fakesrc = gst_element_factory_make ("fakesrc", "fakesrc"); g_assert (fakesrc); g_object_set (fakesrc, "num-buffers", 50, NULL); /* and a sink */ fakesink = gst_element_factory_make ("fakesink", "fakesink"); g_assert (fakesink); /* add objects to the main pipeline */ gst_bin_add_many (GST_BIN (bin), fakesrc, fakesink, NULL); /* link the elements */ gst_element_link (fakesrc, fakesink); loop = g_main_loop_new (NULL, FALSE); /* get the bus, we need to install a sync handler */ bus = gst_pipeline_get_bus (GST_PIPELINE (bin)); gst_bus_enable_sync_message_emission (bus); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "sync-message::stream-status", (GCallback) on_stream_status, NULL); g_signal_connect (bus, "message::error", (GCallback) on_error, NULL); g_signal_connect (bus, "message::eos", (GCallback) on_eos, NULL); /* start playing */ ret = gst_element_set_state (bin, GST_STATE_PLAYING); if (ret != GST_STATE_CHANGE_SUCCESS) { g_message ("failed to change state"); return -1; } /* Run event loop listening for bus messages until EOS or ERROR */ g_main_loop_run (loop); /* stop the bin */ gst_element_set_state (bin, GST_STATE_NULL); gst_object_unref (bus); g_main_loop_unref (loop); return 0; }
// Adopt the given pipeline and subscribe to synchronous "need-context" bus
// messages so contexts can be supplied from the thread that posts them.
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

    GRefPtr<GstBus> messageBus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_enable_sync_message_emission(messageBus.get());
    g_signal_connect(messageBus.get(), "sync-message::need-context",
        G_CALLBACK(mediaPlayerPrivateNeedContextMessageCallback), this);
}
// Wrap an existing pipeline and subscribe to synchronous "element" bus
// messages (posted from streaming threads) via OnSyncMessage.
// The WindowId parameter is accepted but unused in this constructor.
MacWidget::MacWidget(GstElement * inPipeline, WindowId) :
    mPipeline(inPipeline)
{
    // Listen to synchronous messages; the scoped wrapper releases the bus
    // reference when it goes out of scope.
    Gst::ScopedObject<GstBus> bus(gst_pipeline_get_bus(GST_PIPELINE(mPipeline)));
    gst_bus_enable_sync_message_emission(bus.get());
    g_signal_connect(bus.get(), "sync-message::element",
                     G_CALLBACK(&MacWidget::OnSyncMessage), gpointer(this));
}
/* GObject constructed handler: build the videosink bin
 * (ffmpegcolorspace ! videoscale ! gconfvideosink), expose the colorspace
 * sink pad as a ghost pad, and hook the sync-message handler used for
 * window embedding. */
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);
  GstElement *colorspace, *videoscale, *sink;
  GstPad *pad;

  priv->videosink = gst_bin_new (NULL);

  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  /* BUGFIX: the original assigned
   *   priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");
   * here, but a freshly created empty bin has no "sink" pad, and the field
   * is unconditionally overwritten with the ghost pad below — the assignment
   * was dead (and would have leaked a pad reference had it ever succeeded). */

  sink = gst_element_factory_make ("gconfvideosink", NULL);
  g_assert (sink != NULL);

  videoscale = gst_element_factory_make ("videoscale", NULL);
  g_assert (videoscale != NULL);

  /* disable QoS on the scaler/converter: dropped frames are preferable to
   * the elements trying to catch up */
  g_object_set (videoscale, "qos", FALSE, NULL);

  colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
  g_assert (colorspace != NULL);

  g_object_set (colorspace, "qos", FALSE, NULL);

  gst_bin_add_many (GST_BIN (priv->videosink), colorspace, videoscale,
      sink, NULL);

  if (!gst_element_link (colorspace, videoscale))
    g_error ("Failed to link ffmpegcolorspace and videoscale");

  if (!gst_element_link (videoscale, sink))
    g_error ("Failed to link videoscale and gconfvideosink");

  pad = gst_element_get_static_pad (colorspace, "sink");
  g_assert (pad != NULL);

  priv->sink_pad = gst_ghost_pad_new ("sink", pad);
  if (!gst_element_add_pad (priv->videosink, priv->sink_pad))
    g_error ("Couldn't add sink ghostpad to the bin");

  gst_object_unref (pad);

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));

  gst_bus_enable_sync_message_emission (priv->bus);
  g_signal_connect (priv->bus, "sync-message",
      G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
      priv->min_height);
}
/* Example: preroll a playbin on the URI from argv[1], queue a frame step via
 * do_step(), then play until the (file-scope) main loop quits. */
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstBus *bus;

  gst_init (&argc, &argv);

  if (argc < 2) {
    g_print ("usage: %s <uri>\n", argv[0]);
    return -1;
  }

  /* create a new bin to hold the elements */
  bin = gst_element_factory_make ("playbin", "bin");
  g_assert (bin);
  g_object_set (bin, "uri", argv[1], NULL);

  /* watch both async messages and synchronous ones (from streaming threads) */
  bus = gst_pipeline_get_bus (GST_PIPELINE (bin));
  gst_bus_add_signal_watch (bus);
  gst_bus_enable_sync_message_emission (bus);

  g_signal_connect (bus, "message", (GCallback) handle_message, bin);
  g_signal_connect (bus, "sync-message", (GCallback) handle_sync_message, bin);

  /* go to the PAUSED state and wait for preroll */
  g_message ("prerolling first frame");
  gst_element_set_state (bin, GST_STATE_PAUSED);
  /* -1 == GST_CLOCK_TIME_NONE: block until the async state change finishes */
  gst_element_get_state (bin, NULL, NULL, -1);

  /* queue step */
  do_step (bin);

  gst_element_set_state (bin, GST_STATE_PLAYING);

  loop = g_main_loop_new (NULL, TRUE);
  g_main_loop_run (loop);

  g_message ("finished");

  /* stop the bin */
  gst_element_set_state (bin, GST_STATE_NULL);

  g_main_loop_unref (loop);
  gst_object_unref (bus);

  exit (0);
}
/* Build the GStreamer pipeline: a videotestsrc demo chain when no file was
 * given, otherwise filesrc ! decodebin ! gleffects ! fakesink, then install
 * bus watches and block until the pipeline reaches PAUSED. */
void Pipeline::configure ()
{
#ifdef Q_WS_WIN
    m_loop = g_main_loop_new (NULL, FALSE);
#endif
    if (m_videoLocation.isEmpty ()) {
        qDebug ("No video file specified. Using video test source.");
        m_pipeline =
            GST_PIPELINE (gst_parse_launch ("videotestsrc ! "
                "video/x-raw, width=640, height=480, "
                "framerate=(fraction)30/1 ! "
                "glupload ! gleffects effect=5 ! fakesink sync=1", NULL));
    } else {
        QByteArray ba = m_videoLocation.toLocal8Bit ();
        qDebug ("Loading video: %s", ba.data ());
        gchar *pipeline = g_strdup_printf ("filesrc name=f ! "
            "decodebin ! gleffects effect=5 ! " "fakesink sync=1");
        m_pipeline = GST_PIPELINE (gst_parse_launch (pipeline, NULL));
        /* set the file location on the named filesrc after parsing */
        GstElement *f = gst_bin_get_by_name (GST_BIN (m_pipeline), "f");
        g_object_set (G_OBJECT (f), "location", ba.data (), NULL);
        gst_object_unref (GST_OBJECT (f));
        g_free (pipeline);
    }

    /* async watch + synchronous "sync-message" handler (streaming threads) */
    m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
    gst_bus_enable_sync_message_emission (m_bus);
    g_signal_connect (m_bus, "sync-message", G_CALLBACK (sync_bus_call), this);
    /* NOTE(review): the reference from gst_pipeline_get_bus() is dropped here
     * while the m_bus member keeps pointing at the bus; the pipeline keeps it
     * alive while m_pipeline exists — confirm m_bus is not used after the
     * pipeline is destroyed. */
    gst_object_unref (m_bus);

    gst_element_set_state (GST_ELEMENT (this->m_pipeline), GST_STATE_PAUSED);

    GstState state = GST_STATE_PAUSED;
    /* GST_CLOCK_TIME_NONE: blocks indefinitely until preroll completes */
    if (gst_element_get_state (GST_ELEMENT (this->m_pipeline), &state, NULL,
            GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
        qDebug ("failed to pause pipeline");
        return;
    }
}
/* GthViewerPage::show implementation: merge the media-viewer UI, create the
 * playbin on first show, wire up the bus, and preroll the current file. */
static void
gth_media_viewer_page_real_show (GthViewerPage *base)
{
	GthMediaViewerPage *self;
	GError             *error = NULL;
	GstBus             *bus;
	char               *uri;

	self = (GthMediaViewerPage*) base;

	/* merge_id != 0 means the UI (and playbin, below) is already set up */
	if (self->priv->merge_id != 0)
		return;

	self->priv->merge_id = gtk_ui_manager_add_ui_from_string (gth_browser_get_ui_manager (self->priv->browser), media_viewer_ui_info, -1, &error);
	if (self->priv->merge_id == 0) {
		g_warning ("ui building failed: %s", error->message);
		g_error_free (error);
	}

	gth_viewer_page_focus (GTH_VIEWER_PAGE (self));

	if (self->priv->playbin != NULL)
		return;

	self->priv->playbin = gst_element_factory_make ("playbin", "playbin");
	g_signal_connect (self->priv->playbin, "notify::volume", G_CALLBACK (playbin_notify_volume_cb), self);

	/* sync handler applies the window handle from the streaming thread;
	 * the signal watch delivers regular messages in the main loop.
	 * (gst_bus_set_sync_handler is called here with the two-user-arg,
	 * GStreamer 0.10-style signature.)
	 * NOTE(review): the reference returned by gst_pipeline_get_bus() is
	 * never released in this function — looks like a one-per-playbin
	 * bus reference leak; confirm against the teardown path. */
	bus = gst_pipeline_get_bus (GST_PIPELINE (self->priv->playbin));
	gst_bus_enable_sync_message_emission (bus);
	gst_bus_set_sync_handler (bus, (GstBusSyncHandler) set_playbin_window, self);
	gst_bus_add_signal_watch (bus);
	g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), self);

	if (self->priv->file_data == NULL)
		return;

	/* point the playbin at the current file and preroll it */
	uri = g_file_get_uri (self->priv->file_data->file);
	g_object_set (G_OBJECT (self->priv->playbin), "uri", uri, NULL);
	gst_element_set_state (self->priv->playbin, GST_STATE_PAUSED);

	g_free (uri);
}
/* GObject constructed handler: create the gconfvideosink, keep one owned
 * (non-floating) reference to it, register it with the farsight element
 * notifier, and hook the sync-message handler used for window embedding. */
static void
empathy_video_widget_constructed (GObject *object)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);

  priv->videosink = gst_element_factory_make ("gconfvideosink", NULL);
  /* take ownership: ref then sink the initial floating reference */
  gst_object_ref (priv->videosink);
  gst_object_sink (priv->videosink);

  priv->sink_pad = gst_element_get_static_pad (priv->videosink, "sink");

  fs_element_added_notifier_add (priv->notifier, GST_BIN (priv->videosink));

  /* sync-message fires from the posting (streaming) thread — needed for
   * prepare-window-handle style embedding */
  gst_bus_enable_sync_message_emission (priv->bus);
  g_signal_connect (priv->bus, "sync-message",
      G_CALLBACK (empathy_video_widget_sync_message_cb), object);

  gtk_widget_set_size_request (GTK_WIDGET (object), priv->min_width,
      priv->min_height);
}
/* Ruby binding: sink.set_xwindow_id_with_buswatch(bus, x_window_id).
 * Stores the target X window id (plus the bus and the connection id) on the
 * sink via qdata, then connects a sync-message handler on @bus that applies
 * the xid from the streaming thread.  g_object_set_qdata_full's destroy
 * callback releases the callback data when the sink goes away; using
 * g_signal_connect_object ties the handler's lifetime to the sink. */
static VALUE
rg_set_xwindow_id_with_buswatch(VALUE self, VALUE bus, VALUE x_window_id)
{
    struct xid_callback_data* xid_cb_data;
    GObject *sink;

    xid_cb_data = g_slice_new(struct xid_callback_data);
    xid_cb_data->xid = NUM2ULL(x_window_id);
    xid_cb_data->bus = RVAL2GOBJ(bus);

    sink = RVAL2GOBJ(self);
    g_object_set_qdata_full(sink,
                            xoverlay_xid_data_quark,
                            xid_cb_data,
                            xid_callback_data_destroy_cb);

    gst_bus_enable_sync_message_emission(GST_BUS(xid_cb_data->bus));
    xid_cb_data->cb_id = g_signal_connect_object(xid_cb_data->bus,
                                                 "sync-message",
                                                 G_CALLBACK(bus_sync_func_cb),
                                                 sink,
                                                 0);

    return self;
}
int main (int argc, char **argv) { #ifdef WIN32 HGLRC sdl_gl_context = 0; HDC sdl_dc = 0; #else SDL_SysWMinfo info; Display *sdl_display = NULL; Window sdl_win = 0; GLXContext sdl_gl_context = NULL; #endif GMainLoop *loop = NULL; GstPipeline *pipeline = NULL; GstBus *bus = NULL; GstElement *fakesink = NULL; GstState state; GAsyncQueue *queue_input_buf = NULL; GAsyncQueue *queue_output_buf = NULL; const gchar *platform; /* Initialize SDL for video output */ if (SDL_Init (SDL_INIT_VIDEO) < 0) { fprintf (stderr, "Unable to initialize SDL: %s\n", SDL_GetError ()); return -1; } /* Create a 640x480 OpenGL screen */ if (SDL_SetVideoMode (640, 480, 0, SDL_OPENGL) == NULL) { fprintf (stderr, "Unable to create OpenGL screen: %s\n", SDL_GetError ()); SDL_Quit (); return -1; } /* Set the title bar in environments that support it */ SDL_WM_SetCaption ("SDL and gst-plugins-gl", NULL); /* Loop, drawing and checking events */ InitGL (640, 480); gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); /* retrieve and turn off sdl opengl context */ #ifdef WIN32 sdl_gl_context = wglGetCurrentContext (); sdl_dc = wglGetCurrentDC (); wglMakeCurrent (0, 0); platform = "wgl"; sdl_gl_display = gst_gl_display_new (); #else SDL_VERSION (&info.version); SDL_GetWMInfo (&info); /* FIXME: This display is different to the one that SDL uses to create the * GL context inside SDL_SetVideoMode() above which fails on Intel hardware */ sdl_display = info.info.x11.gfxdisplay; sdl_win = info.info.x11.window; sdl_gl_context = glXGetCurrentContext (); glXMakeCurrent (sdl_display, None, 0); platform = "glx"; sdl_gl_display = (GstGLDisplay *) gst_gl_display_x11_new_with_display (sdl_display); #endif sdl_context = gst_gl_context_new_wrapped (sdl_gl_display, (guintptr) sdl_gl_context, gst_gl_platform_from_string (platform), GST_GL_API_OPENGL); pipeline = GST_PIPELINE (gst_parse_launch ("videotestsrc ! video/x-raw, width=320, height=240, framerate=(fraction)30/1 ! " "gleffects effect=5 ! 
fakesink sync=1", NULL)); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), loop); gst_bus_enable_sync_message_emission (bus); g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), NULL); gst_object_unref (bus); /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and * shared with the sdl one */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED); state = GST_STATE_PAUSED; if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL, GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) { g_debug ("failed to pause pipeline\n"); return -1; } /* turn on back sdl opengl context */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif /* append a gst-gl texture to this queue when you do not need it no more */ fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0"); g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL); g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL); queue_input_buf = g_async_queue_new (); queue_output_buf = g_async_queue_new (); g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf); g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf); g_object_set_data (G_OBJECT (fakesink), "loop", loop); gst_object_unref (fakesink); gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); g_main_loop_run (loop); /* before to deinitialize the gst-gl-opengl context, * no shared context (here the sdl one) must be current */ #ifdef WIN32 wglMakeCurrent (0, 0); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); 
gst_object_unref (pipeline); /* turn on back sdl opengl context */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, None, 0); #endif SDL_Quit (); /* make sure there is no pending gst gl buffer in the communication queues * between sdl and gst-gl */ while (g_async_queue_length (queue_input_buf) > 0) { GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf); gst_buffer_unref (buf); } while (g_async_queue_length (queue_output_buf) > 0) { GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf); gst_buffer_unref (buf); } return 0; }
/* Enable emission of the "sync-message" signal on this bus, so handlers run
 * synchronously in the thread that posts each message. */
void Bus::enableSyncMessageEmission()
{
    gst_bus_enable_sync_message_emission(object<GstBus>());
}
int main (int argc, char **argv) { GstBus *bus; GOptionContext *ctx; GIOChannel *io_stdin; GError *err = NULL; gboolean res; GOptionEntry options[] = { {NULL} }; GThread *rthread; /* Clear application state */ memset (state, 0, sizeof (*state)); state->animate = TRUE; /* must initialise the threading system before using any other GLib funtion */ if (!g_thread_supported ()) g_thread_init (NULL); ctx = g_option_context_new ("[ADDITIONAL ARGUMENTS]"); g_option_context_add_main_entries (ctx, options, NULL); g_option_context_add_group (ctx, gst_init_get_option_group ()); if (!g_option_context_parse (ctx, &argc, &argv, &err)) { g_print ("Error initializing: %s\n", GST_STR_NULL (err->message)); exit (1); } g_option_context_free (ctx); if (argc != 2) { g_print ("Usage: %s <URI> or <PIPELINE-DESCRIPTION>\n", argv[0]); exit (1); } /* Initialize GStreamer */ gst_init (&argc, &argv); /* initialize inter thread comunnication */ init_intercom (state); TRACE_VC_MEMORY ("state 0"); if (!(rthread = g_thread_new ("render", (GThreadFunc) render_func, NULL))) { g_print ("Render thread create failed\n"); exit (1); } /* Initialize player */ if (gst_uri_is_valid (argv[1])) { res = init_playbin_player (state, argv[1]); } else { res = init_parse_launch_player (state, argv[1]); } if (!res) goto done; /* Create a GLib Main Loop and set it to run */ state->main_loop = g_main_loop_new (NULL, FALSE); /* Add a keyboard watch so we get notified of keystrokes */ io_stdin = g_io_channel_unix_new (fileno (stdin)); g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc) handle_keyboard, state); g_io_channel_unref (io_stdin); /* *INDENT-OFF* */ g_print ("Available commands: \n" " a - Toggle animation \n" " p - Pause playback \n" " r - Resume playback \n" " l - Query position/duration\n" " f - Seek 30 seconds forward \n" " b - Seek 30 seconds backward \n" " q - Quit \n"); /* *INDENT-ON* */ /* Connect the bus handlers */ bus = gst_element_get_bus (state->pipeline); gst_bus_set_sync_handler (bus, 
(GstBusSyncHandler) bus_sync_handler, state, NULL); gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH); gst_bus_enable_sync_message_emission (bus); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback) error_cb, state); g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback) buffering_cb, state); g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback) eos_cb, state); g_signal_connect (G_OBJECT (bus), "message::qos", (GCallback) qos_cb, state); g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback) state_changed_cb, state); gst_object_unref (bus); /* Make player start playing */ gst_element_set_state (state->pipeline, GST_STATE_PLAYING); /* Start the mainloop */ state->main_loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (state->main_loop); done: /* Release pipeline */ if (state->pipeline) { gst_element_set_state (state->pipeline, GST_STATE_NULL); if (state->vsink) { gst_object_unref (state->vsink); state->vsink = NULL; } gst_object_unref (state->pipeline); } /* Unref the mainloop */ if (state->main_loop) { g_main_loop_unref (state->main_loop); } /* Stop rendering thread */ state->running = FALSE; g_thread_join (rthread); terminate_intercom (state); TRACE_VC_MEMORY ("at exit"); return 0; }
void detect_motion(){ //GMainLoop *loop; //guint bus_watch_id; /* Initialize GStreamer */ gst_init (NULL, NULL); //loop = g_main_loop_new (NULL, FALSE); /* Create the elements */ source = gst_element_factory_make ("rpicamsrc", "source"); filter = gst_element_factory_make ("capsfilter", "filter"); queue = gst_element_factory_make ("queue", "queue"); decoder = gst_element_factory_make ("omxh264dec", "decoder"); convert = gst_element_factory_make ("videoconvert", "convert"); motions = gst_element_factory_make ("motioncells", "motions"); sink = gst_element_factory_make ("autovideosink", "sink"); //rpicamsrc g_object_set (source, "exposure-mode", 2, NULL); g_object_set (source, "fullscreen", FALSE, NULL); g_object_set (filter, "caps", gst_caps_from_string("video/x-h264,width=320,height=240,framerate=5/1"), NULL); g_object_set (queue, "max_size_buffers", 4096, NULL); g_object_set (motions, "postallmotion", TRUE, NULL); // g_object_set(motions,"threshold",0.05,NULL); /* Create the empty pipeline */ pipeline = gst_pipeline_new ("test-pipeline"); if (!pipeline || !source || !sink || !filter || !queue || !decoder || !convert || !motions ) { g_printerr ("Not all elements could be created.\n"); return -1; } /* Build the pipeline */ gst_bin_add_many (GST_BIN (pipeline), source, filter, queue, decoder, convert, motions, sink, NULL); if (gst_element_link_many (source, filter, queue, decoder, convert, motions, sink, NULL) != TRUE) { g_printerr ("Elements could not be linked.\n"); gst_object_unref (pipeline); return -1; } bus = gst_element_get_bus (pipeline); gst_bus_enable_sync_message_emission (bus); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::element", (GCallback) bus_call, NULL); //bus_watch_id = gst_bus_add_watch (bus, bus_call, loop); //gst_object_unref (bus); /* Start playing */ ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); 
gst_object_unref (pipeline); return -1; } //g_print ("Running...\n"); //g_main_loop_run (loop); /* Free resources */ //gst_object_unref (bus); //gst_element_set_state (pipeline, GST_STATE_NULL); //gst_object_unref (pipeline); //g_source_remove (bus_watch_id); //g_main_loop_unref (loop); }
/* End-to-end test of the "shm" transmitter: create a transmitter and stream
 * transmitter, exchange candidates over /tmp/src1 and /tmp/src2, push data
 * from two fakesrcs, and block until the expected buffers were received.
 * BUGFIX: the three `&params[0].value` expressions had been corrupted into
 * the mojibake `¶ms[0].value` (HTML-entity mangling of `&para`); restored. */
static void
run_shm_transmitter_test (gint flags)
{
  GError *error = NULL;
  FsTransmitter *trans;
  FsStreamTransmitter *st;
  GstBus *bus = NULL;
  GParameter params[1];
  GList *local_cands = NULL;
  GstStateChangeReturn ret;
  FsCandidate *cand;
  GList *remote_cands = NULL;
  int param_count = 0;
  gint bus_source;

  done = FALSE;
  connected_count = 0;
  g_cond_init (&cond);
  g_mutex_init (&test_mutex);

  buffer_count[0] = 0;
  buffer_count[1] = 0;
  received_known[0] = 0;
  received_known[1] = 0;

  got_candidates[0] = FALSE;
  got_candidates[1] = FALSE;
  got_prepared[0] = FALSE;
  got_prepared[1] = FALSE;

  /* start from a clean slate: stale sockets would break candidate setup */
  if (unlink ("/tmp/src1") < 0 && errno != ENOENT)
    fail ("Could not unlink /tmp/src1: %s", strerror (errno));
  if (unlink ("/tmp/src2") < 0 && errno != ENOENT)
    fail ("Could not unlink /tmp/src2: %s", strerror (errno));

  local_cands = g_list_append (local_cands, fs_candidate_new (NULL, 1,
          FS_CANDIDATE_TYPE_HOST, FS_NETWORK_PROTOCOL_UDP, "/tmp/src1", 0));
  local_cands = g_list_append (local_cands, fs_candidate_new (NULL, 2,
          FS_CANDIDATE_TYPE_HOST, FS_NETWORK_PROTOCOL_UDP, "/tmp/src2", 0));

  if (flags & FLAG_LOCAL_CANDIDATES) {
    memset (params, 0, sizeof (GParameter));

    params[0].name = "preferred-local-candidates";
    g_value_init (&params[0].value, FS_TYPE_CANDIDATE_LIST);
    g_value_take_boxed (&params[0].value, local_cands);

    param_count = 1;
  }

  associate_on_source = !(flags & FLAG_NO_SOURCE);

  /* a recv-only filter on a non-sending side pre-credits the counters */
  if ((flags & FLAG_NOT_SENDING) && (flags & FLAG_RECVONLY_FILTER)) {
    buffer_count[0] = 20;
    received_known[0] = 20;
  }

  trans = fs_transmitter_new ("shm", 2, 0, &error);

  if (error)
    ts_fail ("Error creating transmitter: (%s:%d) %s",
        g_quark_to_string (error->domain), error->code, error->message);
  ts_fail_if (trans == NULL, "No transmitter create, yet error is still NULL");
  g_clear_error (&error);

  if (flags & FLAG_RECVONLY_FILTER)
    ts_fail_unless (g_signal_connect (trans, "get-recvonly-filter",
            G_CALLBACK (get_recvonly_filter), NULL));

  pipeline = setup_pipeline (trans, G_CALLBACK (_handoff_handler));

  bus = gst_element_get_bus (pipeline);
  bus_source = gst_bus_add_watch (bus, bus_error_callback, NULL);

  gst_bus_enable_sync_message_emission (bus);
  g_signal_connect (bus, "sync-message::error",
      G_CALLBACK (sync_error_handler), NULL);

  gst_object_unref (bus);

  st = fs_transmitter_new_stream_transmitter (trans, NULL, param_count,
      params, &error);

  if (param_count)
    g_value_unset (&params[0].value);

  if (error)
    ts_fail ("Error creating stream transmitter: (%s:%d) %s",
        g_quark_to_string (error->domain), error->code, error->message);
  ts_fail_if (st == NULL, "No stream transmitter created, yet error is NULL");
  g_clear_error (&error);

  g_object_set (st, "sending", !(flags & FLAG_NOT_SENDING), NULL);

  ts_fail_unless (g_signal_connect (st, "new-local-candidate",
          G_CALLBACK (_new_local_candidate), trans),
      "Could not connect new-local-candidate signal");

  ts_fail_unless (g_signal_connect (st, "local-candidates-prepared",
          G_CALLBACK (_candidate_prepared), NULL),
      "Could not connect local-candidates-prepared signal");

  ts_fail_unless (g_signal_connect (st, "error",
          G_CALLBACK (stream_transmitter_error), NULL),
      "Could not connect error signal");

  ts_fail_unless (g_signal_connect (st, "known-source-packet-received",
          G_CALLBACK (_known_source_packet_received), NULL),
      "Could not connect known-source-packet-received signal");

  ts_fail_unless (g_signal_connect (st, "state-changed",
          G_CALLBACK (_state_changed), NULL),
      "Could not connect state-changed signal");

  if (!fs_stream_transmitter_gather_local_candidates (st, &error)) {
    if (error)
      ts_fail ("Could not start gathering local candidates (%s:%d) %s",
          g_quark_to_string (error->domain), error->code, error->message);
    else
      ts_fail ("Could not start gathering candidates"
          " (without a specified error)");
  } else {
    ts_fail_unless (error == NULL);
  }
  g_clear_error (&error);

  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  ts_fail_if (ret == GST_STATE_CHANGE_FAILURE,
      "Could not set the pipeline to playing");

  if (!(flags & FLAG_LOCAL_CANDIDATES)) {
    ret = fs_stream_transmitter_force_remote_candidates (st, local_cands,
        &error);
    fs_candidate_list_destroy (local_cands);
    if (error)
      ts_fail ("Error while adding candidate: (%s:%d) %s",
          g_quark_to_string (error->domain), error->code, error->message);
    ts_fail_unless (ret == TRUE, "No detailed error from add_remote_candidate");
  } else {
    ts_fail_unless (error == NULL);
  }
  g_clear_error (&error);

  /* the remote side identifies its sockets through the candidate username */
  cand = fs_candidate_new (NULL, 1, FS_CANDIDATE_TYPE_HOST,
      FS_NETWORK_PROTOCOL_UDP, NULL, 0);
  cand->username = g_strdup ("/tmp/src1");
  remote_cands = g_list_prepend (remote_cands, cand);
  cand = fs_candidate_new (NULL, 2, FS_CANDIDATE_TYPE_HOST,
      FS_NETWORK_PROTOCOL_UDP, NULL, 0);
  cand->username = g_strdup ("/tmp/src2");
  remote_cands = g_list_prepend (remote_cands, cand);
  ret = fs_stream_transmitter_force_remote_candidates (st, remote_cands,
      &error);
  fs_candidate_list_destroy (remote_cands);
  if (error)
    ts_fail ("Error while adding candidate: (%s:%d) %s",
        g_quark_to_string (error->domain), error->code, error->message);
  ts_fail_unless (ret == TRUE, "No detailed error from add_remote_candidate");
  g_clear_error (&error);

  /* wait for both directions to connect */
  g_mutex_lock (&test_mutex);
  while (connected_count < 2)
    g_cond_wait (&cond, &test_mutex);
  g_mutex_unlock (&test_mutex);

  setup_fakesrc (trans, pipeline, 1);
  setup_fakesrc (trans, pipeline, 2);

  /* wait until the handoff handler signals completion */
  g_mutex_lock (&test_mutex);
  while (!done)
    g_cond_wait (&cond, &test_mutex);
  g_mutex_unlock (&test_mutex);

  fail_unless (got_prepared[0] == TRUE);
  fail_unless (got_prepared[1] == TRUE);
  fail_unless (got_candidates[0] == TRUE);
  fail_unless (got_candidates[1] == TRUE);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  if (st) {
    fs_stream_transmitter_stop (st);
    g_object_unref (st);
  }

  g_object_unref (trans);

  g_source_remove (bus_source);
  gst_object_unref (pipeline);

  g_cond_clear (&cond);
  g_mutex_clear (&test_mutex);
}
/* Build the playback pipeline for @url: buffered HTTP source -> matroska
 * demuxer -> video/audio branches (set up by the helpers), install the sync
 * bus handler, and move the pipeline to READY.  Returns false and sets the
 * error state on any failure. */
bool GstVideoPlayerBackend::start(const QUrl &url)
{
    Q_ASSERT(!m_pipeline);
    if (state() == PermanentError || m_pipeline)
        return false;

    if (!m_sink) {
        setError(true, QLatin1String("Internal error: improper usage"));
        return false;
    }

    /* Pipeline */
    m_pipeline = gst_pipeline_new("stream");
    if (!m_pipeline) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("stream")));
        return false;
    }

    // TODO: uncomment when vaapi download starts works
    // enableFactory("vaapidecode", m_useHardwareDecoding);
    enableFactory("vaapidecode", false);

    /* Buffered HTTP source */
    setVideoBuffer(new VideoHttpBuffer(url));

    GstElement *source = m_videoBuffer->setupSrcElement(m_pipeline);
    if (!source) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("source")));
        setVideoBuffer(0);
        return false;
    }

    m_videoBuffer->startBuffering();

    /* Demuxer signals pads as streams are discovered */
    GstElement *demuxer = gst_element_factory_make("matroskademux","avi-demuxer");
    g_signal_connect(demuxer, "pad-added", G_CALLBACK(staticDemuxerPadReady), this);
    g_signal_connect(demuxer, "no-more-pads", G_CALLBACK(staticDemuxerNoMorePads), this);

    gst_bin_add(GST_BIN(m_pipeline), demuxer);

    if (!gst_element_link(source, demuxer)) {
        setError(true, tr("Failed to create pipeline (%1)").arg(QLatin1String("demuxer")));
        return false;
    }

    if (!setupVideoPipeline() || !setupAudioPipeline())
        return false;

    m_playbackSpeed = 1.0;

    /* We handle all messages in the sync handler, because we can't run a glib event loop.
     * Although linux does use glib's loop (and we could take advantage of that), it's better
     * to handle everything this way for windows and mac support. */
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    Q_ASSERT(bus);
    gst_bus_enable_sync_message_emission(bus);
    gst_bus_set_sync_handler(bus, staticBusHandler, this);
    gst_object_unref(bus);

    /* Move the pipeline into the READY state.
     * NOTE(review): the original comment claimed PLAYING and warned the call
     * may block for seconds, but the code requests GST_STATE_READY —
     * presumably the PLAYING transition happens elsewhere; confirm. */
    gst_element_set_state(m_pipeline, GST_STATE_READY);

    return true;
}
/* Build the playback pipeline for @url: buffered HTTP source -> decodebin2
 * (pads linked later in decodePadReady) -> colorspace -> sink, install the
 * sync bus handler, and move the pipeline to READY.  Returns false and sets
 * the error state on any failure. */
bool VideoPlayerBackend::start(const QUrl &url)
{
    Q_ASSERT(!m_pipeline);
    if (state() == PermanentError || m_pipeline)
        return false;

    if (!m_sink) {
        setError(true, QLatin1String("Internal error: improper usage"));
        return false;
    }

    /* Pipeline */
    m_pipeline = gst_pipeline_new("stream");
    if (!m_pipeline) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("stream")));
        return false;
    }

    /* Buffered HTTP source */
    setVideoBuffer(new VideoHttpBuffer(url));

    GstElement *source = m_videoBuffer->setupSrcElement(m_pipeline);
    if (!source) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("source")));
        setVideoBuffer(0);
        return false;
    }

    m_videoBuffer->startBuffering();

    /* Decoder */
    GstElement *decoder = gst_element_factory_make("decodebin2", "decoder");
    if (!decoder) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("decoder")));
        return false;
    }

    g_object_set(G_OBJECT(decoder),
                 "use-buffering", TRUE,
                 "max-size-time", 10 * GST_SECOND,
                 NULL);

    g_signal_connect(decoder, "new-decoded-pad", G_CALLBACK(staticDecodePadReady), this);

    /* Colorspace conversion (no-op if unnecessary) */
    GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    if (!colorspace) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("colorspace")));
        return false;
    }

    gst_bin_add_many(GST_BIN(m_pipeline), decoder, colorspace, m_sink, NULL);

    if (!gst_element_link(source, decoder)) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("link decoder")));
        return false;
    }

    if (!gst_element_link(colorspace, m_sink)) {
        setError(true, tr("Failed to create video pipeline (%1)").arg(QLatin1String("link sink")));
        return false;
    }

    /* This is the element that is linked to the decoder for video output; it will be linked when decodePadReady
     * gives us the video pad. */
    m_videoLink = colorspace;

    m_playbackSpeed = 1.0;

    /* We handle all messages in the sync handler, because we can't run a glib event loop.
     * Although linux does use glib's loop (and we could take advantage of that), it's better
     * to handle everything this way for windows and mac support. */
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    Q_ASSERT(bus);
    gst_bus_enable_sync_message_emission(bus);
    gst_bus_set_sync_handler(bus, staticBusHandler, this);
    gst_object_unref(bus);

    /* Move the pipeline into the READY state.
     * NOTE(review): the original comment claimed PLAYING and warned the call
     * may block for seconds, but the code requests GST_STATE_READY —
     * presumably the PLAYING transition happens elsewhere; confirm. */
    gst_element_set_state(m_pipeline, GST_STATE_READY);

    return true;
}
/* Build (or rebuild) the playbin2-based pipeline for @player.
 *
 * Creates playbin2, the configured video sink and an audio sink
 * (autoaudiosink, or a synchronised fakesink when the player has no
 * audio), wires bus sync-message handlers and sets the initial volume.
 *
 * Returns TRUE on success; on failure emits SIGNAL_ERROR, leaves
 * player->priv->pipeline NULL and returns FALSE.
 *
 * Fixes vs. original:
 *  - the fakesink's "sync" property was set before the NULL check, so a
 *    failed factory_make led to g_object_set() on NULL;
 *  - the video-sink error message hard-coded "autovideosink" even though
 *    the element name comes from player->priv->video_sink;
 *  - the (floating) video sink leaked when audio-sink creation failed.
 */
static gboolean
build_pipeline (GbpPlayer *player)
{
  GstElement *autovideosink;
  GstElement *audiosink;

  /* Tear down any previous pipeline before building a fresh one. */
  if (player->priv->pipeline != NULL) {
    gst_element_set_state (GST_ELEMENT (player->priv->pipeline),
        GST_STATE_NULL);
    g_object_unref (player->priv->pipeline);
  }

  player->priv->pipeline =
      GST_PIPELINE (gst_element_factory_make ("playbin2", NULL));
  if (player->priv->pipeline == NULL) {
    /* FIXME: create our domain */
    GError *error = g_error_new (GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
        "couldn't find playbin");

    g_signal_emit (player, player_signals[SIGNAL_ERROR], 0,
        error, "more debug than that?");
    g_error_free (error);

    return FALSE;
  }

  autovideosink = gst_element_factory_make (player->priv->video_sink, NULL);
  if (autovideosink == NULL) {
    /* Report the sink that was actually requested, not a hard-coded name. */
    GError *error = g_error_new (GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
        "couldn't find %s", player->priv->video_sink);

    g_signal_emit (player, player_signals[SIGNAL_ERROR], 0,
        error, "more debug than that?");
    g_error_free (error);

    g_object_unref (player->priv->pipeline);
    player->priv->pipeline = NULL;

    return FALSE;
  }

  if (player->priv->have_audio)
    audiosink = gst_element_factory_make ("autoaudiosink", NULL);
  else
    audiosink = gst_element_factory_make ("fakesink", NULL);

  if (audiosink == NULL) {
    GError *error = g_error_new (GST_LIBRARY_ERROR, GST_LIBRARY_ERROR_FAILED,
        "couldn't find %s",
        player->priv->have_audio ? "autoaudiosink" : "fakesink");

    g_signal_emit (player, player_signals[SIGNAL_ERROR], 0,
        error, "more debug than that?");
    g_error_free (error);

    /* Don't leak the (floating) video sink we already created. */
    gst_object_unref (autovideosink);
    g_object_unref (player->priv->pipeline);
    player->priv->pipeline = NULL;

    return FALSE;
  }

  /* Configure the fakesink only after the NULL check: it must still consume
   * buffers at clock rate so playback timing is preserved. */
  if (!player->priv->have_audio)
    g_object_set (audiosink, "sync", TRUE, NULL);

  g_object_set (G_OBJECT (player->priv->pipeline),
      "video-sink", autovideosink, NULL);
  g_object_set (G_OBJECT (player->priv->pipeline),
      "audio-sink", audiosink, NULL);

  player->priv->bus = gst_pipeline_get_bus (player->priv->pipeline);
  gst_bus_enable_sync_message_emission (player->priv->bus);

  g_object_connect (player->priv->bus,
      "signal::sync-message::state-changed",
      G_CALLBACK (on_bus_state_changed_cb), player,
      "signal::sync-message::eos", G_CALLBACK (on_bus_eos_cb), player,
      "signal::sync-message::error", G_CALLBACK (on_bus_error_cb), player,
      "signal::sync-message::element", G_CALLBACK (on_bus_element_cb), player,
      NULL);

  g_object_connect (player->priv->pipeline,
      "signal::notify::source", playbin_source_cb, player, NULL);

  g_object_connect (autovideosink,
      "signal::element-added", autovideosink_element_added_cb, player, NULL);

  g_object_set (player->priv->pipeline,
      "volume", player->priv->volume, NULL);

  player->priv->have_pipeline = TRUE;
  return TRUE;
}
int main (int argc, char **argv) { #ifdef WIN32 HGLRC sdl_gl_context = 0; HDC sdl_dc = 0; #else SDL_SysWMinfo info; Display *sdl_display = NULL; Window sdl_win = 0; GLXContext sdl_gl_context = NULL; #endif GMainLoop *loop = NULL; GstPipeline *pipeline = NULL; GstBus *bus = NULL; GstElement *glimagesink = NULL; const gchar *platform; /* Initialize SDL for video output */ if (SDL_Init (SDL_INIT_VIDEO) < 0) { fprintf (stderr, "Unable to initialize SDL: %s\n", SDL_GetError ()); return -1; } /* Create a 640x480 OpenGL screen */ if (SDL_SetVideoMode (640, 480, 0, SDL_OPENGL) == NULL) { fprintf (stderr, "Unable to create OpenGL screen: %s\n", SDL_GetError ()); SDL_Quit (); return -1; } /* Set the title bar in environments that support it */ SDL_WM_SetCaption ("SDL and gst-plugins-gl", NULL); /* Loop, drawing and checking events */ InitGL (640, 480); gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); /* retrieve and turn off sdl opengl context */ #ifdef WIN32 sdl_gl_context = wglGetCurrentContext (); sdl_dc = wglGetCurrentDC (); wglMakeCurrent (0, 0); platform = "wgl"; sdl_gl_display = gst_gl_display_new (); #else SDL_VERSION (&info.version); SDL_GetWMInfo (&info); /* FIXME: This display is different to the one that SDL uses to create the * GL context inside SDL_SetVideoMode() above which fails on Intel hardware */ sdl_display = info.info.x11.gfxdisplay; sdl_win = info.info.x11.window; sdl_gl_context = glXGetCurrentContext (); glXMakeCurrent (sdl_display, None, 0); platform = "glx"; sdl_gl_display = (GstGLDisplay *) gst_gl_display_x11_new_with_display (sdl_display); #endif sdl_context = gst_gl_context_new_wrapped (sdl_gl_display, (guintptr) sdl_gl_context, gst_gl_platform_from_string (platform), GST_GL_API_OPENGL); pipeline = GST_PIPELINE (gst_parse_launch ("videotestsrc ! video/x-raw, width=320, height=240, framerate=(fraction)30/1 ! 
" "glimagesink name=glimagesink0", NULL)); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), loop); g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), loop); gst_bus_enable_sync_message_emission (bus); g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), NULL); gst_object_unref (bus); glimagesink = gst_bin_get_by_name (GST_BIN (pipeline), "glimagesink0"); g_signal_connect (G_OBJECT (glimagesink), "client-draw", G_CALLBACK (on_client_draw), NULL); gst_object_unref (glimagesink); /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and * shared with the sdl one */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED); /* turn on back sdl opengl context */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); g_timeout_add (100, update_sdl_scene, pipeline); g_main_loop_run (loop); /* before to deinitialize the gst-gl-opengl context, * no shared context (here the sdl one) must be current */ #ifdef WIN32 wglMakeCurrent (0, 0); #else glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context); #endif gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); gst_object_unref (pipeline); /* turn on back sdl opengl context */ #ifdef WIN32 wglMakeCurrent (sdl_dc, sdl_gl_context); #else glXMakeCurrent (sdl_display, None, 0); #endif SDL_Quit (); return 0; }
/* Build and start the H.263 RTP receive pipeline:
 *
 *   udpsrc (127.0.0.1:1234, application/x-rtp)
 *     -> rtph263pdepay -> avdec_h263p -> autovideosink
 *
 * The pipeline is stored in data->pipeline and set to PLAYING.
 *
 * NOTE(review): @magent and @stream_id are currently unused — the
 * nicesrc-based variant they were meant for was commented out in the
 * original; confirm before removing the parameters.
 *
 * Fixes vs. original: the bus reference was never released (the signal
 * watch holds its own ref), and several never-used locals (msg,
 * bus_source, videoconvert, capsfilter) produced dead code.
 */
void
_receive_video_init_gstreamer (NiceAgent *magent, guint stream_id,
    CustomData *data)
{
  GstElement *pipeline, *source, *h263p, *rtph263pdepay, *sink;
  GstBus *bus;
  GstStateChangeReturn ret;

  /* Unused until the pipeline reads from the nice agent instead of udpsrc. */
  (void) magent;
  (void) stream_id;

  GST_INFO ("Pipeline initialization");

  source = gst_element_factory_make ("udpsrc", "source");
  rtph263pdepay = gst_element_factory_make ("rtph263pdepay", "rtph263pdepay");
  h263p = gst_element_factory_make ("avdec_h263p", "h263p");
  sink = gst_element_factory_make ("autovideosink", "sink");

  /* Listen on localhost for plain RTP; the depayloader handles the rest. */
  g_object_set (source, "address", "127.0.0.1", NULL);
  g_object_set (source, "port", 1234, NULL);
  g_object_set (source, "caps", gst_caps_from_string ("application/x-rtp"),
      NULL);

  pipeline = gst_pipeline_new ("Video receive pipeline");

  if (!pipeline || !source || !h263p || !rtph263pdepay || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, rtph263pdepay, h263p, sink,
      NULL);
  if (gst_element_link_many (source, rtph263pdepay, h263p, sink,
          NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return;
  }

  GST_INFO ("Pipeline created, registing on bus");

  bus = gst_element_get_bus (pipeline);
  gst_bus_enable_sync_message_emission (bus);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", (GCallback) on_error, NULL);
  /* The signal watch keeps its own reference; drop ours to avoid a leak. */
  gst_object_unref (bus);

  GST_INFO ("Registing pipeline on bus");

  data->pipeline = pipeline;

  ret = gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return;
  }
}
/**
 * gst_device_monitor_add_filter:
 * @monitor: a device monitor
 * @classes: (allow-none): device classes to use as filter or %NULL for any class
 * @caps: (allow-none): the #GstCaps to filter or %NULL for ANY
 *
 * Adds a filter for which #GstDevice will be monitored, any device that matches
 * all classes and the #GstCaps will be returned.
 *
 * Filters must be added before the #GstDeviceMonitor is started.
 *
 * Returns: The id of the new filter or 0 if no provider matched the filter's
 * classes.
 *
 * Since: 1.4
 */
guint
gst_device_monitor_add_filter (GstDeviceMonitor * monitor,
    const gchar * classes, GstCaps * caps)
{
  GList *factories = NULL;
  struct DeviceFilter *filter;
  guint id = 0;
  gboolean matched = FALSE;

  g_return_val_if_fail (GST_IS_DEVICE_MONITOR (monitor), 0);
  /* Filters may only be added while the monitor is stopped. */
  g_return_val_if_fail (!monitor->priv->started, 0);

  GST_OBJECT_LOCK (monitor);

  /* Build the filter record: ANY caps when none given, and a NULL classesv
   * (matching every provider) when no classes string given. */
  filter = g_slice_new0 (struct DeviceFilter);
  filter->id = monitor->priv->last_id++;
  if (caps)
    filter->caps = gst_caps_ref (caps);
  else
    filter->caps = gst_caps_new_any ();
  if (classes)
    filter->classesv = g_strsplit (classes, "/", 0);

  /* The argument is presumably a minimum factory rank — TODO confirm against
   * gst_device_provider_factory_list_get_device_providers(). */
  factories = gst_device_provider_factory_list_get_device_providers (1);

  while (factories) {
    GstDeviceProviderFactory *factory = factories->data;

    if (gst_device_provider_factory_has_classesv (factory, filter->classesv)) {
      GstDeviceProvider *provider;

      provider = gst_device_provider_factory_get (factory);

      if (provider) {
        guint i;

        /* If this provider is already tracked, drop the extra reference but
         * still count the filter as matched. */
        for (i = 0; i < monitor->priv->providers->len; i++) {
          if (g_ptr_array_index (monitor->priv->providers, i) == provider) {
            gst_object_unref (provider);
            provider = NULL;
            matched = TRUE;
            break;
          }
        }
      }

      if (provider) {
        /* Newly-seen provider: hook hidden/unhidden notifications and forward
         * its bus messages synchronously to the monitor. */
        GstBus *bus = gst_device_provider_get_bus (provider);

        update_hidden_providers_list (&monitor->priv->hidden, provider);
        g_signal_connect (provider, "provider-hidden",
            (GCallback) provider_hidden, monitor);
        g_signal_connect (provider, "provider-unhidden",
            (GCallback) provider_unhidden, monitor);

        matched = TRUE;
        gst_bus_enable_sync_message_emission (bus);
        g_signal_connect (bus, "sync-message",
            G_CALLBACK (bus_sync_message), monitor);
        gst_object_unref (bus);
        /* The providers array takes ownership of the provider reference;
         * bump the cookie so iterators notice the change. */
        g_ptr_array_add (monitor->priv->providers, provider);
        monitor->priv->cookie++;
      }
    }

    factories = g_list_remove (factories, factory);
    gst_object_unref (factory);
  }
  /* Ensure there is no leak here */
  g_assert (factories == NULL);

  /* Only keep the filter (and return its id) when at least one provider
   * matched; otherwise free it and return 0. */
  if (matched) {
    id = filter->id;
    g_ptr_array_add (monitor->priv->filters, filter);
  } else {
    device_filter_free (filter);
  }
  GST_OBJECT_UNLOCK (monitor);

  return id;
}
/* Drive one end-to-end rawudp transmitter test.
 *
 * Resets the global per-component counters, creates a "rawudp"
 * FsTransmitter plus a stream transmitter with the given construction
 * @params, wires up candidate/error signals, runs the pipeline until the
 * main loop quits, then tears everything down.  @flags selects test
 * variants (STUN, recvonly filter, not-sending, no-source association).
 *
 * Relies on file-scope globals: loop, pipeline, pipeline_done,
 * pipeline_mod_mutex, has_stun, associate_on_source, buffer_count,
 * received_known, and the various ts_fail* check macros.
 */
static void
run_rawudp_transmitter_test (gint n_parameters, GParameter *params,
    gint flags)
{
  GError *error = NULL;
  FsTransmitter *trans;
  FsStreamTransmitter *st;
  GstBus *bus = NULL;
  guint tos;

  /* Reset shared counters/state from any previous test run. */
  buffer_count[0] = 0;
  buffer_count[1] = 0;
  received_known[0] = 0;
  received_known[1] = 0;
  pipeline_done = FALSE;

  has_stun = flags & FLAG_HAS_STUN;
  associate_on_source = !(flags & FLAG_NO_SOURCE);

  /* In the not-sending + recvonly-filter variant, pre-load the counters so
   * the completion checks account for the buffers that will never flow. */
  if ((flags & FLAG_NOT_SENDING) && (flags & FLAG_RECVONLY_FILTER)) {
    buffer_count[0] = 20;
    received_known[0] = 20;
  }

  loop = g_main_loop_new (NULL, FALSE);
  /* Two components (RTP + RTCP), no extra preferred ports. */
  trans = fs_transmitter_new ("rawudp", 2, 0, &error);

  if (error) {
    ts_fail ("Error creating transmitter: (%s:%d) %s",
        g_quark_to_string (error->domain), error->code, error->message);
  }
  ts_fail_if (trans == NULL, "No transmitter create, yet error is still NULL");

  /* Round-trip the "tos" property to verify it sticks. */
  g_object_set (trans, "tos", 2, NULL);
  g_object_get (trans, "tos", &tos, NULL);
  ts_fail_unless (tos == 2);

  if (flags & FLAG_RECVONLY_FILTER)
    ts_fail_unless (g_signal_connect (trans, "get-recvonly-filter",
            G_CALLBACK (get_recvonly_filter), NULL));

  pipeline = setup_pipeline (trans, G_CALLBACK (_handoff_handler));

  /* Async watch for ordinary errors, sync handler for errors that must be
   * caught on the streaming thread. The watch/connect hold their own refs,
   * so ours is dropped immediately. */
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_watch (bus, bus_error_callback, NULL);
  gst_bus_enable_sync_message_emission (bus);
  g_signal_connect (bus, "sync-message::error",
      G_CALLBACK (sync_error_handler), NULL);
  gst_object_unref (bus);

  st = fs_transmitter_new_stream_transmitter (trans, NULL, n_parameters,
      params, &error);

  if (error) {
    /* A STUN test on a machine without network access is skipped rather
     * than failed. */
    if (has_stun &&
        error->domain == FS_ERROR &&
        error->code == FS_ERROR_NETWORK &&
        error->message && strstr (error->message, "unreachable")) {
      GST_WARNING ("Skipping stunserver test, we have no network");
      goto skip;
    } else
      ts_fail ("Error creating stream transmitter: (%s:%d) %s",
          g_quark_to_string (error->domain), error->code, error->message);
  }

  ts_fail_if (st == NULL, "No stream transmitter created, yet error is NULL");

  g_object_set (st, "sending", !(flags & FLAG_NOT_SENDING), NULL);

  /* Hook every stream-transmitter signal the test asserts on. */
  ts_fail_unless (g_signal_connect (st, "new-local-candidate",
          G_CALLBACK (_new_local_candidate), GINT_TO_POINTER (flags)),
      "Could not connect new-local-candidate signal");
  ts_fail_unless (g_signal_connect (st, "local-candidates-prepared",
          G_CALLBACK (_local_candidates_prepared), GINT_TO_POINTER (flags)),
      "Could not connect local-candidates-prepared signal");
  ts_fail_unless (g_signal_connect (st, "new-active-candidate-pair",
          G_CALLBACK (_new_active_candidate_pair), trans),
      "Could not connect new-active-candidate-pair signal");
  ts_fail_unless (g_signal_connect (st, "error",
          G_CALLBACK (stream_transmitter_error), NULL),
      "Could not connect error signal");
  ts_fail_unless (g_signal_connect (st, "known-source-packet-received",
          G_CALLBACK (_known_source_packet_received), NULL),
      "Could not connect known-source-packet-received signal");

  ts_fail_if (gst_element_set_state (pipeline, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE, "Could not set the pipeline to playing");

  if (!fs_stream_transmitter_gather_local_candidates (st, &error)) {
    if (error) {
      ts_fail ("Could not start gathering local candidates (%s:%d) %s",
          g_quark_to_string (error->domain), error->code, error->message);
    } else
      ts_fail ("Could not start gathering candidates"
          " (without a specified error)");
  }

  g_idle_add (check_running, NULL);

  g_main_loop_run (loop);

 skip:
  /* Mark the pipeline done under the mutex so concurrent modifiers stop
   * touching it, then tear everything down. */
  g_mutex_lock (&pipeline_mod_mutex);
  pipeline_done = TRUE;
  g_mutex_unlock (&pipeline_mod_mutex);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* st may be NULL when we jumped here from the skip path. */
  if (st) {
    fs_stream_transmitter_stop (st);
    g_object_unref (st);
  }
  g_object_unref (trans);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
/* Construct a video rendering widget backed by a GStreamer video sink.
 *
 * Builds a sink element (named by @video_sink_name), wraps it in a bin
 * with a ghost "sink" pad so the bin can be linked as a whole, registers
 * the bin with a Farsight element-added notifier, and listens for
 * sync-messages on @bus (an extra ref of which this widget keeps in
 * bus_).
 *
 * NOTE(review): on element/bin creation failure the constructor returns
 * early, leaving the widget only partially initialised — callers
 * presumably check video_playback_bin_/video_playback_element_ before
 * use; confirm.
 */
VideoWidget::VideoWidget(GstBus *bus, QWidget *parent, const QString &name, const QString &video_sink_name)
    : Communication::VideoPlaybackWidgetInterface(parent),
      bus_((GstBus *) gst_object_ref(bus)),   // keep our own reference to the bus
      video_overlay_(0),
      video_playback_element_(0),
      video_playback_bin_(0),
      name_(name),
      window_id_(0),
      on_element_added_g_signal_(0),
      on_sync_message_g_signal_(0)
{
    qDebug() << "VideoWidget " << name << " INIT STARTED";
    setWindowTitle(name);

    // Element notifier init: OnElementAdded is called for every element
    // later added inside bins registered with this notifier.
    notifier_ = fs_element_added_notifier_new();
    on_element_added_g_signal_ = g_signal_connect(notifier_, "element-added", G_CALLBACK(&VideoWidget::OnElementAdded), this);

    // UNIX -> autovideosink
#ifdef Q_WS_X11
    // Double buffering must be off or the overlay flickers under X11.
    qt_x11_set_global_double_buffer(false);
    //video_playback_element_ = gst_element_factory_make(video_sink_name.toStdString().c_str(), 0);
    //gst_object_ref(video_playback_element_);
    //gst_object_sink(video_playback_element_);
    //fs_element_added_notifier_add(notifier_, GST_BIN(video_playback_element_));
#endif

    // WINDOWS -> autovideosink will chose one of there: glimagesink (best), directdrawsink (possible buffer errors), dshowvideosink (possible buffer errors)
    // X11 ->
    video_playback_element_ = gst_element_factory_make(video_sink_name.toStdString().c_str(), 0);
    if (!video_playback_element_)
    {
        qDebug() << "VideoWidget " << name << " CANNOT CREATE video_playback_element_ (" << video_sink_name <<")";
        return;
    }

    // Video bin init
    const QString video_bin_name = "video_bin_for_" + name;
    video_playback_bin_ = gst_bin_new(video_bin_name.toStdString().c_str());
    if (!video_playback_bin_)
    {
        qDebug() << "VideoWidget " << name << " CANNOT CREATE video_bin_";
        return;
    }

    // Add playback element to video bin
    gst_bin_add(GST_BIN(video_playback_bin_), video_playback_element_);

    // Pad inits: expose the sink pad of the playback element as a ghost pad
    // on the bin so outside elements can link to the bin directly.
    GstPad *static_sink_pad = gst_element_get_static_pad(video_playback_element_, "sink");
    GstPad *sink_ghost_pad = gst_ghost_pad_new("sink", static_sink_pad);

    // Add pad to video bin
    gst_element_add_pad(GST_ELEMENT(video_playback_bin_), sink_ghost_pad);
    gst_object_unref(G_OBJECT(static_sink_pad));

    // Take ownership of the bin by sinking its floating reference.
    gst_object_ref(video_playback_bin_);
    gst_object_sink(video_playback_bin_);

    fs_element_added_notifier_add(notifier_, GST_BIN(video_playback_bin_));

    // Sync messages are needed to catch prepare-window-id style
    // notifications on the streaming thread.
    gst_bus_enable_sync_message_emission(bus_);
    on_sync_message_g_signal_ = g_signal_connect(bus_, "sync-message", G_CALLBACK(&VideoWidget::OnSyncMessage), this);

    qDebug() << "VideoWidget " << name << " INIT COMPLETE";

    // QWidget properties
    QPalette palette;
    palette.setColor(QPalette::Background, Qt::black);
    palette.setColor(QPalette::Window, Qt::black);
    setPalette(palette);

    // Show nothing and lets put qwidgets as normal external windows
    //setAutoFillBackground(true);
    //setAttribute(Qt::WA_NoSystemBackground, true);
    //setAttribute(Qt::WA_PaintOnScreen, true);
    setWindowFlags(Qt::Dialog);
    resize(VIDEO_WIDTH, VIDEO_HEIGHT);
    setMinimumSize(VIDEO_WIDTH, VIDEO_HEIGHT);
}
/* Build and start the Raspberry Pi H.264 send pipeline:
 *
 *   rpicamsrc -> capsfilter (640x480@25) -> h264parse -> rtph264pay
 *     -> nicesink (stream @stream_id, component 1, via @magent)
 *
 * The pipeline and its bus are stored in the global RpiData_SendVideo.
 *
 * Fix vs. original: the function is declared void but executed
 * `return -1;` on every error path, which is a constraint violation in C;
 * those are now plain `return;`.  The unused `msg` local was removed.
 */
void
_video_send_init_gstreamer (NiceAgent *magent, guint stream_id)
{
  GstElement *rpicamsrc, *capsfilter, *h264parse, *rtph264pay, *nicesink;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (NULL, NULL);

  rpicamsrc = gst_element_factory_make ("rpicamsrc", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  h264parse = gst_element_factory_make ("h264parse", NULL);
  rtph264pay = gst_element_factory_make ("rtph264pay", NULL);
  nicesink = gst_element_factory_make ("nicesink", NULL);

  /* rpicamsrc: modest bitrate, camera mounted upside-down, stabilised */
  g_object_set (rpicamsrc, "bitrate", 300000, NULL);
  g_object_set (rpicamsrc, "rotation", 180, NULL);
  g_object_set (rpicamsrc, "video-stabilisation", TRUE, NULL);

  g_object_set (capsfilter, "caps",
      gst_caps_from_string ("video/x-h264,width=640,height=480,framerate=25/1"),
      NULL);

  /* rtph264pay: payload type 96, re-send SPS/PPS every second */
  g_object_set (rtph264pay, "pt", 96, NULL);
  g_object_set (rtph264pay, "config-interval", 1, NULL);

  /* nicesink: transmit over the given ICE stream/component */
  g_object_set (nicesink, "agent", magent, NULL);
  g_object_set (nicesink, "stream", stream_id, NULL);
  g_object_set (nicesink, "component", 1, NULL);

  /* Create the empty pipeline */
  RpiData_SendVideo->pipeline = gst_pipeline_new ("send-video-pipeline");

  if (!RpiData_SendVideo->pipeline || !rpicamsrc || !capsfilter || !h264parse
      || !rtph264pay || !nicesink) {
    g_printerr ("Not all elements could be created.\n");
    return;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (RpiData_SendVideo->pipeline), rpicamsrc,
      capsfilter, h264parse, rtph264pay, nicesink, NULL);
  if (gst_element_link_many (rpicamsrc, capsfilter, h264parse, rtph264pay,
          nicesink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (RpiData_SendVideo->pipeline);
    return;
  }

  RpiData_SendVideo->bus = gst_element_get_bus (RpiData_SendVideo->pipeline);
  gst_bus_enable_sync_message_emission (RpiData_SendVideo->bus);
  gst_bus_add_signal_watch (RpiData_SendVideo->bus);
  g_signal_connect (RpiData_SendVideo->bus, "message::error",
      (GCallback) on_error_video, NULL);

  /* Start playing */
  ret = gst_element_set_state (RpiData_SendVideo->pipeline,
      GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (RpiData_SendVideo->pipeline);
    return;
  }
}