Esempio n. 1
0
/* Create (once) the X11 subwindow used to render a TSMF video stream and
 * hand it to the GStreamer video sink's overlay interface.
 *
 * Returns 0 on success, -3 for non-video streams (nothing to do, decoder
 * is marked ready), and -1 on invalid arguments or X11 failure.
 */
int tsmf_window_create(TSMFGstreamerDecoder* decoder)
{
	struct X11Handle* hdl;
#if GST_VERSION_MAJOR > 0
	GstVideoOverlay *overlay;
#else
	GstXOverlay *overlay;
#endif

	/* BUG FIX: validate the pointer before ANY dereference; the original
	 * read decoder->media_type and decoder->outsink first and only then
	 * checked decoder for NULL, so the check could never catch anything. */
	if (!decoder)
		return -1;

	if (decoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
	{
		/* Non-video streams need no window; mark ready and bail out. */
		decoder->ready = TRUE;
		return -3;
	}

	if (!decoder->platform)
		return -1;

#if GST_VERSION_MAJOR > 0
	overlay = GST_VIDEO_OVERLAY(decoder->outsink);
#else
	overlay = GST_X_OVERLAY(decoder->outsink);
#endif

	hdl = (struct X11Handle*) decoder->platform;

	if (!hdl->subwin)
	{
		int event, error;
		/* Create a 1x1 child of the FreeRDP window; it is resized later. */
		hdl->subwin = XCreateSimpleWindow(hdl->disp, *(int *)hdl->xfwin, 0, 0, 1, 1, 0, 0, 0);

		if (!hdl->subwin)
		{
			/* BUG FIX: the original only logged the failure and then went
			 * on to map and hand an invalid window id to the overlay. */
			WLog_ERR(TAG, "Could not create subwindow!");
			return -1;
		}

		XMapWindow(hdl->disp, hdl->subwin);
		XSync(hdl->disp, FALSE);
#if GST_VERSION_MAJOR > 0
		gst_video_overlay_set_window_handle(overlay, hdl->subwin);
#else
		gst_x_overlay_set_window_handle(overlay, hdl->subwin);
#endif
		decoder->ready = TRUE;
#if defined(WITH_XEXT)
		hdl->has_shape = XShapeQueryExtension(hdl->disp, &event, &error);
#else
		/* Silence unused-variable warnings when XShape is not built in. */
		(void) event;
		(void) error;
#endif
	}

	/* Let the sink handle expose/navigation events on the subwindow. */
#if GST_VERSION_MAJOR > 0
	gst_video_overlay_handle_events(overlay, TRUE);
#else
	gst_x_overlay_handle_events(overlay, TRUE);
#endif
	return 0;
}
Esempio n. 2
0
/* Qt + GStreamer demo: render a videotestsrc into a QWidget via the
 * GstXOverlay interface.  Returns the Qt event-loop exit code. */
int main(int argc, char *argv[])
{
#if !GLIB_CHECK_VERSION (2, 31, 0)
  if (!g_thread_supported ())
    g_thread_init (NULL);
#endif

  gst_init (&argc, &argv);
  QApplication app(argc, argv);
  app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));

  /* prepare the pipeline */

  GstElement *pipeline = gst_pipeline_new ("xvoverlay");
  GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
  GstElement *sink = find_video_sink ();

  if (sink == NULL)
    g_error ("Couldn't find a working video sink.");

  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

  /* prepare the ui */

  QWidget window;
  window.resize(320, 240);
  window.setWindowTitle("GstXOverlay Qt demo");
  window.show();

  /* Embed the video output into the Qt widget's native window. */
  WId xwinid = window.winId();
  gst_x_overlay_set_window_handle (GST_X_OVERLAY (sink), xwinid);

  /* run the pipeline */

  GstStateChangeReturn sret = gst_element_set_state (pipeline,
      GST_STATE_PLAYING);
  if (sret == GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    /* BUG FIX: the pipeline is destroyed here; clear the pointer so the
     * cleanup after app.exec() does not set-state/unref it a second time
     * (use-after-free / double unref in the original). */
    pipeline = NULL;
    /* Exit application */
    QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
  }

  int ret = app.exec();

  window.hide();
  if (pipeline) {
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
  }

  return ret;
}
/* JNI callback: the Java surface is going away.  Detach it from the video
 * sink, drop our native-window reference and mark the session as
 * uninitialized so check_initialization_complete() can run again. */
static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Releasing Native Window %p", data->native_window);

  if (data->pipeline) {
    /* Tell the sink it no longer has a window to draw on, then drop the
     * pipeline back to READY so it stops rendering. */
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)NULL);
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  }

  /* BUG FIX: guard against a NULL window — the original called
   * ANativeWindow_release unconditionally. */
  if (data->native_window) {
    ANativeWindow_release (data->native_window);
    data->native_window = NULL;
  }
  data->initialized = FALSE;
}
Esempio n. 4
0
/* Synchronous bus handler: when a video sink announces that it needs an X
 * window ("prepare-xwindow-id"), hand it the window id that was packed
 * into `userdata`.  All messages are passed on to the async handler. */
static GstBusSyncReply
bus_sync_handler (G_GNUC_UNUSED GstBus *bus, GstMessage *message,
    gpointer userdata)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT
      && gst_structure_has_name (message->structure, "prepare-xwindow-id")) {
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC (message)),
        GPOINTER_TO_UINT (userdata));
  }

  return GST_BUS_PASS;
}
Esempio n. 5
0
/* Entry point: build the camera pipeline, embed its video output into our
 * GTK window and run the main loop until the user quits, then tear
 * everything down.
 * NOTE(review): `w`, `p` and `CAMERA` are presumably file-scope globals
 * (window widgets, pipeline bundle, camera device string) — confirm
 * against the rest of the file. */
int main (int argc, char *argv[])
{
    /* Initialisation */
    gst_init (&argc, &argv);

    /* Initialize GTK */
    gtk_init (&argc, &argv);

    /* initialize out gstreamer pipeline */
    create_pipeline();

    /* create our window that shall display everything */
    w = create_ui();
    
    /* do not display the video in own frame but integrate it into our window */
    gst_x_overlay_set_window_handle(GST_X_OVERLAY(p.sink), GDK_WINDOW_XID(w->video_window->window));

    /* Set the pipeline to "playing" state */
    g_print ("Now playing stream from: %s\n", CAMERA);

    /*
       IMPORTANT after state changed to playing the capabilities are fixed.
     */
    gst_element_set_state (p.pipeline, GST_STATE_PLAYING);

    g_print ("Running...");

    /* finished construction and now run the main loop while waiting for user interaction */
    g_main_loop_run (p.loop);

    
    /* ===== clean up ===== */

    /* Out of the main loop, clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (p.pipeline, GST_STATE_PAUSED);
    gst_element_set_state (p.pipeline, GST_STATE_NULL);

    g_print("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (p.pipeline));
    g_source_remove (p.bus_watch_id);
    g_main_loop_unref (p.loop);

    free(w);
    w = NULL;

    return 0;
}
/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();

  /* Nothing to do until BOTH the native window and the main loop exist,
   * and only notify once. */
  if (data->initialized || !data->native_window || !data->main_loop)
    return;

  GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);

  /* The main loop is running and we received a native window, inform the sink about it */
  gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)data->native_window);

  /* Tell the Java side we are ready; swallow (but log) any Java exception. */
  (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
  data->initialized = TRUE;
}
Esempio n. 7
0
/*
 * @brief "realize" callback, fired when GTK creates the physical window:
 *        retrieve the native window handle and hand it to GStreamer
 *        through the XOverlay interface of playbin2.
 */
static void realise_cb (GtkWidget *widget, CustomData *data) {
	GdkWindow *gdk_window = gtk_widget_get_window (widget);

	/* XOverlay embedding needs a real (non-client-side) window. */
	if (!gdk_window_ensure_native (gdk_window)) {
		g_error ("couldn't create native window needed for GstXOverlay!");
	}

#ifdef GDK_WINDOWING_X11
	/* Fetch the X11 window id and pass it straight to playbin2's overlay. */
	gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->playbin2),
			(guintptr) GDK_WINDOW_XID (gdk_window));
#else
#error "Unsupported platform!!"
#endif
}
Esempio n. 8
0
/* Synchronous bus handler: intercepts the "prepare-xwindow-id" element
 * message and points the emitting video sink at the viewfinder drawing
 * area's X window.  Drops the message once handled. */
static GstBusSyncReply
bus_sync_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  GtkWidget *ui_drawing;

  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT)
    return GST_BUS_PASS;

  /* FIX: use the gst_message_get_structure() accessor instead of poking
   * message->structure directly, matching how the other handlers in this
   * code base read message structures. */
  if (!gst_structure_has_name (gst_message_get_structure (message),
          "prepare-xwindow-id"))
    return GST_BUS_PASS;

  /* FIXME: make sure to get XID in main thread */
  ui_drawing = GTK_WIDGET (gtk_builder_get_object (builder, "viewfinderArea"));
  gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC (message)),
      GDK_WINDOW_XWINDOW (gtk_widget_get_window (ui_drawing)));

  gst_message_unref (message);
  return GST_BUS_DROP;
}
Esempio n. 9
0
// Embed the video sink that emitted this bus message into our native video
// window instead of letting it open its own output window.
void GStreamerGWorld::setWindowOverlay(GstMessage* message)
{
    GstObject* sink = GST_MESSAGE_SRC(message);

    // Only sinks implementing the GstXOverlay interface can be embedded.
    if (!GST_IS_X_OVERLAY(sink))
        return;

    // Keep the picture's aspect ratio when the sink supports it.
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(sink), "force-aspect-ratio"))
        g_object_set(sink, "force-aspect-ratio", TRUE, NULL);

    if (m_videoWindow) {
        m_videoWindow->prepareForOverlay(message);
        // gst_x_overlay_set_window_handle replaced set_xwindow_id in
        // -plugins-base 0.10.31; pick the call matching the build.
#if GST_CHECK_VERSION(0, 10, 31) || GST_VERSION_NANO
        gst_x_overlay_set_window_handle(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#else
        gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#endif
    }
}
Esempio n. 10
0
/* Qt + playbin2 demo: play the URI given as argv[1] inside a QWidget.
 * Returns 2 on bad usage, otherwise the Qt event-loop exit code. */
int main(int argc, char *argv[])
{
    if (!g_thread_supported())
        g_thread_init(NULL);

    /* Exactly one argument expected: the URI to play. */
    if (argc != 2) {
        return 2;
    }

    gst_init(&argc, &argv);
    QApplication app(argc, argv);
    app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit()));

    m_pipeline = gst_element_factory_make("playbin2", NULL);
    g_object_set(m_pipeline, "uri", argv[1], NULL);

    QWidget window;
    window.resize(640, 480);
    window.show();
    m_xwinid = window.winId();

    /* Render the video inside our Qt widget rather than a new window. */
    gst_x_overlay_set_window_handle(GST_X_OVERLAY(m_pipeline), m_xwinid);

    GstStateChangeReturn sret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    if (sret == GST_STATE_CHANGE_FAILURE)
    {
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        gst_object_unref(m_pipeline);
        // BUG FIX: the pipeline is destroyed here; clear the pointer so
        // the cleanup after app.exec() does not set-state/unref it again
        // (use-after-free / double unref in the original).
        m_pipeline = NULL;
        // Exit application
        QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    }

    int ret = app.exec();

    window.hide();
    if (m_pipeline) {
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        gst_object_unref(m_pipeline);
    }

    return ret;
}
Esempio n. 11
0
// Embed the video sink that emitted this bus message into our native video
// window instead of letting it open its own output window.
void GStreamerGWorld::setWindowOverlay(GstMessage* message)
{
    GstObject* sink = GST_MESSAGE_SRC(message);

    // Only sinks implementing the GstXOverlay interface can be embedded.
    if (!GST_IS_X_OVERLAY(sink))
        return;

    // Keep the picture's aspect ratio when the sink supports it.
    if (g_object_class_find_property(G_OBJECT_GET_CLASS(sink), "force-aspect-ratio"))
        g_object_set(sink, "force-aspect-ratio", TRUE, NULL);

    if (m_videoWindow) {
        m_videoWindow->prepareForOverlay(message);

// gst_x_overlay_set_window_handle was introduced in -plugins-base
// 0.10.31, just like the macro for checking the version.
#ifdef GST_CHECK_PLUGINS_BASE_VERSION
        gst_x_overlay_set_window_handle(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#else
        gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(sink), m_videoWindow->videoWindowId());
#endif
    }
}
Esempio n. 12
0
/* Demo entry point: xvimagesink rendered into a GTK drawing area, with an
 * animated render rectangle.  Flags: -a force aspect ratio, -b draw
 * borders, -v verbose.
 * NOTE(review): `verbose`, `anim_state` and the callbacks (window_closed,
 * handle_resize_cb, handle_draw_cb, animate_render_rect) are defined
 * elsewhere in the file — confirm. */
gint
main (gint argc, gchar ** argv)
{
  GdkWindow *video_window_xwindow;
  GtkWidget *window, *video_window;
  GstElement *pipeline, *src, *sink;
  GstStateChangeReturn sret;
  gulong embed_xid = 0;
  gboolean force_aspect = FALSE, draw_borders = FALSE;

#if !GLIB_CHECK_VERSION (2, 31, 0)
  if (!g_thread_supported ())
    g_thread_init (NULL);
#endif

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  /* NOTE(review): the scan starts at index 0, so argv[0] (the program
   * name) is compared too; harmless for these flags but worth confirming. */
  if (argc) {
    gint arg;
    for (arg = 0; arg < argc; arg++) {
      if (!strcmp (argv[arg], "-a"))
        force_aspect = TRUE;
      else if (!strcmp (argv[arg], "-b"))
        draw_borders = TRUE;
      else if (!strcmp (argv[arg], "-v"))
        verbose = TRUE;
    }
  }

  /* prepare the pipeline */

  pipeline = gst_pipeline_new ("xvoverlay");
  src = gst_element_factory_make ("videotestsrc", NULL);
  sink = gst_element_factory_make ("xvimagesink", NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

  /* We handle events ourselves (see configure/draw handlers below). */
  g_object_set (G_OBJECT (sink), "handle-events", FALSE,
      "force-aspect-ratio", force_aspect, "draw-borders", draw_borders, NULL);

  /* prepare the ui */

  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (window_closed), (gpointer) pipeline);
  gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);

  video_window = gtk_drawing_area_new ();
  gtk_widget_set_double_buffered (video_window, FALSE);
  gtk_container_add (GTK_CONTAINER (window), video_window);

  /* show the gui and play */
  gtk_widget_show_all (window);

  /* realize window now so that the video window gets created and we can
   * obtain its XID before the pipeline is started up and the videosink
   * asks for the XID of the window to render onto */
  gtk_widget_realize (window);

  video_window_xwindow = gtk_widget_get_window (video_window);
  embed_xid = GDK_WINDOW_XID (video_window_xwindow);
  if (verbose) {
    g_print ("Window realize: got XID %lu\n", embed_xid);
  }

  /* we know what the video sink is in this case (xvimagesink), so we can
   * just set it directly here now (instead of waiting for a prepare-xwindow-id
   * element message in a sync bus handler and setting it there) */
  gst_x_overlay_set_window_handle (GST_X_OVERLAY (sink), embed_xid);

  /* Seed the animation state used by animate_render_rect(). */
  anim_state.overlay = GST_X_OVERLAY (sink);
  anim_state.widget = video_window;
  anim_state.w = 320;
  anim_state.h = 240;
  anim_state.a = 0.0;
  anim_state.p = (G_PI + G_PI) / 200.0;

  handle_resize_cb (video_window, NULL, sink);
  g_signal_connect (video_window, "configure-event",
      G_CALLBACK (handle_resize_cb), NULL);
  g_signal_connect (video_window, "draw", G_CALLBACK (handle_draw_cb), NULL);

  /* Advance the animated render rectangle every 50 ms. */
  g_timeout_add (50, (GSourceFunc) animate_render_rect, NULL);

  /* run the pipeline */
  sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (sret == GST_STATE_CHANGE_FAILURE)
    gst_element_set_state (pipeline, GST_STATE_NULL);
  else {
    anim_state.running = TRUE;
    gtk_main ();
  }

  gst_object_unref (pipeline);
  return 0;
}
Esempio n. 13
0
/* Embed the sink's video output into the given GTK widget's X window. */
static void captureOverlay (GtkWidget *widget, GstElement * videoSink)
{
    GdkWindow *gdk_win = widget->window;
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (videoSink),
        GDK_WINDOW_XID (gdk_win));
}
Esempio n. 14
0
/* Thin wrapper around gst_x_overlay_set_window_handle(); forwards the
 * native window handle to the overlay unchanged. */
void
gst_set_window_handle(GstXOverlay *xoverlay, guintptr window_handle)
{
  gst_x_overlay_set_window_handle (xoverlay, window_handle);
}
Esempio n. 15
0
/* Compatibility shim: exposes the GStreamer 1.x name
 * gst_video_overlay_set_window_handle() on top of the 0.10 GstXOverlay
 * API, so callers can be written against the newer name. */
static inline void gst_video_overlay_set_window_handle(GstXOverlay *overlay, guintptr handle) {
	gst_x_overlay_set_window_handle(overlay, handle);
}
Esempio n. 16
0
/* (Re)build the whole playback pipeline from scratch and start playing
 * `filename`: filesrc -> typefind -> decodebin fanning out into an audio
 * bin (queue/convert/volume/level/alsasink) and a video bin
 * (queue/scale/rate/colorspace/xvimagesink embedded in video_output).
 * Tears down any previous pipeline first.
 * NOTE(review): the elements, `pipeline`, `bus`, `mutex`, `filename`,
 * `video_output`, `volumeButton` etc. appear to be file-scope globals —
 * confirm against the rest of the file. */
void
gst_play_main() {
    GstPad *audiopad, *videopad;
    //Stop before playing
    if (pipeline) {
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(GST_OBJECT(pipeline));
        pipeline = NULL;
    }
    {
        mutex = 0;

        /* setup */
        pipeline = gst_pipeline_new("pipeline");

        /* watch state-changed and EOS on the bus */
        bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
        gst_bus_add_signal_watch(bus);
	g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, NULL);
	g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, NULL);
        gst_object_unref(bus);

        src = gst_element_factory_make("filesrc", "source");
        g_object_set(G_OBJECT(src), "location", filename, NULL);
        typefind = gst_element_factory_make("typefind", "typefinder");
        //g_signal_connect(typefind,"have-type",G_CALLBACK(typefound_cb),loop);
        dec = gst_element_factory_make("decodebin", "decoder");
        g_signal_connect(dec, "new-decoded-pad", G_CALLBACK(cb_newpad), NULL);
        //myplugin = gst_element_factory_make("myplugin","MyPlugin");
        audioqueue = gst_element_factory_make("queue", "audioqueue");
        videoqueue = gst_element_factory_make("queue", "videoqueue");
        gst_bin_add_many(GST_BIN(pipeline), src, typefind, dec, NULL);
        gst_element_link_many(src, typefind, dec, NULL);

        /* create audio output */
        audio = gst_bin_new("audiobin");
        conv = gst_element_factory_make("audioconvert", "aconv");
        typefind2 = gst_element_factory_make("typefind", "typefinder2");
	volume = gst_element_factory_make("volume","volume");
	level = gst_element_factory_make("level","level");
        //g_signal_connect(typefind2,"have-type",G_CALLBACK(typefound_cb),loop);
        audiopad = gst_element_get_static_pad(audioqueue, "sink");
        sink = gst_element_factory_make("alsasink", "sink");
        gst_bin_add_many(GST_BIN(audio), audioqueue, conv, typefind2,volume,level, sink, NULL);
        gst_element_link_many(audioqueue, conv, typefind2,volume, level,sink, NULL);
        /* expose the audio queue's sink pad on the bin via a ghost pad */
        gst_element_add_pad(audio,
                gst_ghost_pad_new("sink", audiopad));
        gst_object_unref(audiopad);
        gst_bin_add(GST_BIN(pipeline), audio);

        /* create video output */
        video = gst_bin_new("videobin");
        caps2 =gst_caps_from_string("video/x-raw-yuv,framerate=25/1");
        caps =gst_caps_from_string("video/x-raw-yuv,width=1024,height=768");
        
        videoRate = gst_element_factory_make("videorate", "Video Rate");
        capsFilter1 = gst_element_factory_make("capsfilter", "Caps Filter");
        g_object_set(G_OBJECT(capsFilter1), "caps", caps2, NULL);
        videoScale = gst_element_factory_make("videoscale", "Video Scale");
        //g_object_set(G_OBJECT(videoScale),"add-borders","true",NULL);
        capsFilter2 = gst_element_factory_make("capsfilter", "Caps Filter2");
        g_object_set(G_OBJECT(capsFilter2), "caps", caps, NULL);
        
        convVid = gst_element_factory_make("ffmpegcolorspace", "converter");
        videopad = gst_element_get_static_pad(videoqueue, "sink");
        videosink = gst_element_factory_make("xvimagesink", "videosink");
        g_object_set(G_OBJECT(videosink),"pixel-aspect-ratio","3/4",NULL);
	g_object_set(G_OBJECT(videosink),"force-aspect-ratio",TRUE,NULL);
        gst_bin_add_many(GST_BIN(video), videoqueue,videoScale,capsFilter2 ,videoRate, capsFilter1, convVid, videosink, NULL);
        gst_element_link_many(videoqueue, videoScale,capsFilter2 ,videoRate, capsFilter1, convVid, videosink, NULL);
        gst_element_add_pad(video, gst_ghost_pad_new("sink", videopad));
        gst_object_unref(videopad);
        gst_bin_add(GST_BIN(pipeline), video);

	

        /* run */
        //gst_element_set_state (pipeline, GST_STATE_PLAYING);

        g_print("Now playing: %s\n", filename);
        /* embed the xvimagesink into our GTK drawing area before playing */
        if (GST_IS_X_OVERLAY (videosink))
        {
            gst_x_overlay_set_window_handle(GST_X_OVERLAY(videosink), GPOINTER_TO_UINT(GINT_TO_POINTER(GDK_WINDOW_XWINDOW(video_output->window))));
           /* if(gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(videosink),0,0,800,600))
            {
                gst_x_overlay_expose(GST_X_OVERLAY(videosink));    
                g_print("Redraw");
            }*/
        }
        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        /* mirror the element's current volume onto the UI volume button */
	gdouble curr_vol; 
    	g_object_get(volume,"volume",&curr_vol,NULL);
    	gtk_scale_button_set_value(GTK_SCALE_BUTTON(volumeButton),(curr_vol)*10);
	g_timeout_add(1, refresh_ui, NULL);
    }
}
Esempio n. 17
0
/* Synchronous bus handler for the camerabin timing test: embeds the video
 * sink into our X window, writes preview images to disk, records when the
 * preview arrived and when the pipeline reached PLAYING. */
static GstBusSyncReply
sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *st;
  const GValue *image;
  GstBuffer *buf = NULL;
  guint8 *data_buf = NULL;
  gchar *caps_string;
  guint size = 0;
  gchar *preview_filename = NULL;
  FILE *f = NULL;
  size_t written;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:{
      st = gst_message_get_structure (message);
      if (st) {
        /* FIX: test the structure we already fetched and NULL-checked
         * (`st`) instead of reaching into message->structure directly. */
        if (gst_structure_has_name (st, "prepare-xwindow-id")) {
          if (!no_xwindow && window) {
            gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC
                    (message)), window);
            gst_message_unref (message);
            message = NULL;
            return GST_BUS_DROP;
          }
        } else if (gst_structure_has_name (st, "preview-image")) {
          CaptureTiming *timing;

          GST_DEBUG ("preview-image");

          /* Record the moment the preview arrived for this capture. */
          timing = (CaptureTiming *) g_list_first (capture_times)->data;
          timing->got_preview = gst_util_get_timestamp ();

          {
            /* set up probe to check when the viewfinder gets data */
            GstPad *pad = gst_element_get_static_pad (viewfinder_sink, "sink");

            viewfinder_probe_id = gst_pad_add_buffer_probe (pad,
                (GCallback) viewfinder_get_timestamp_probe, NULL);

            gst_object_unref (pad);
          }

          /* extract preview-image from msg */
          image = gst_structure_get_value (st, "buffer");
          if (image) {
            buf = gst_value_get_buffer (image);
            data_buf = GST_BUFFER_DATA (buf);
            size = GST_BUFFER_SIZE (buf);
            preview_filename = g_strdup_printf ("test_vga.rgb");
            caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
            g_free (caps_string);
            f = g_fopen (preview_filename, "w");
            if (f) {
              written = fwrite (data_buf, size, 1, f);
              if (!written) {
                g_print ("error writing file\n");
              }
              fclose (f);
            } else {
              g_print ("error opening file for raw image writing\n");
            }
            g_free (preview_filename);
          }
        }
      }
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
      /* Note the startup time once camerabin itself reaches PLAYING. */
      if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) {
        GstState newstate;

        gst_message_parse_state_changed (message, NULL, &newstate, NULL);
        if (newstate == GST_STATE_PLAYING) {
          startup_time = gst_util_get_timestamp ();
        }
      }
      break;
    default:
      /* unhandled message */
      break;
  }
  return GST_BUS_PASS;
}
Esempio n. 18
0
/* Synchronous bus handler: embeds the video sink into our X window and
 * writes captured preview images to disk, logging capture progress. */
static GstBusSyncReply
sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *st;
  const GValue *image;
  GstBuffer *buf = NULL;
  guint8 *data_buf = NULL;
  gchar *caps_string;
  guint size = 0;
  gchar *preview_filename = NULL;
  FILE *f = NULL;
  size_t written;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:{
      st = gst_message_get_structure (message);
      if (st) {
        /* FIX: test the structure we already fetched and NULL-checked
         * (`st`) instead of reaching into message->structure directly. */
        if (gst_structure_has_name (st, "prepare-xwindow-id")) {
          if (!no_xwindow && window) {
            gst_x_overlay_set_window_handle (GST_X_OVERLAY (GST_MESSAGE_SRC
                    (message)), window);
            gst_message_unref (message);
            message = NULL;
            return GST_BUS_DROP;
          }
        } else if (gst_structure_has_name (st, "image-captured")) {
          GST_DEBUG ("image-captured");
        } else if (gst_structure_has_name (st, "preview-image")) {
          GST_DEBUG ("preview-image");
          //extract preview-image from msg
          image = gst_structure_get_value (st, "buffer");
          if (image) {
            buf = gst_value_get_buffer (image);
            data_buf = GST_BUFFER_DATA (buf);
            size = GST_BUFFER_SIZE (buf);
            preview_filename = g_strdup_printf ("test_vga.rgb");
            caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
            g_print ("writing buffer to %s, elapsed: %.2fs, buffer caps: %s\n",
                preview_filename, g_timer_elapsed (timer, NULL), caps_string);
            g_free (caps_string);
            f = g_fopen (preview_filename, "w");
            if (f) {
              written = fwrite (data_buf, size, 1, f);
              if (!written) {
                g_print ("error writing file\n");
              }
              fclose (f);
            } else {
              g_print ("error opening file for raw image writing\n");
            }
            g_free (preview_filename);
          }
        }
      }
      break;
    }
    default:
      /* unhandled message */
      break;
  }
  return GST_BUS_PASS;
}
Esempio n. 19
0
// Dispatch GStreamer bus messages for the camera pipeline: embeds the
// viewfinder into our window, logs errors/warnings/info via Qt, and
// reacts to camerabin state transitions.
// NOTE(review): `viewfinder`, `windowId` and `camerabin` are presumably
// members of Pipeline — confirm against the class declaration.
void Pipeline::handleBusMessage(GstMessage *message)
{
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ELEMENT:
        {
            // The only message we are handling here is the
            // prepare-xwindow-id one
            if (gst_structure_has_name (message->structure,
                                        "prepare-xwindow-id")) {
                gst_x_overlay_set_window_handle(GST_X_OVERLAY(viewfinder),
                                                windowId);
            }
            break;
        }
    case GST_MESSAGE_ERROR:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_error(message, &gerror, &debug);
            qCritical() << "Debug" << debug << " Error " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_WARNING:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_warning(message, &gerror, &debug);
            qWarning() << "Debug" << debug << " Warning " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_INFO:
        {
            GError *gerror = 0;
            gchar *debug = 0;
            gst_message_parse_info(message, &gerror, &debug);
            qDebug() << "Debug" << debug << " Info " << gerror->message;
            g_free(debug);
            g_error_free(gerror);
            break;
        }

    case GST_MESSAGE_STATE_CHANGED:
        {
            // Only camerabin's own transitions are of interest here.
            if (GST_ELEMENT(GST_MESSAGE_SRC(message)) == camerabin) {
                GstState oldstate, newstate, pending;
                gst_message_parse_state_changed(message, &oldstate, &newstate, &pending);
                qDebug() << Q_FUNC_INFO << gst_element_state_get_name(oldstate)
                         << "->" << gst_element_state_get_name(newstate) << "=>"
                         << gst_element_state_get_name(pending);

                GstStateChange stateTransition =
                    GST_STATE_TRANSITION(oldstate, newstate);

                switch (stateTransition) {
                case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
                    // Notify listeners on the Qt event loop, not this thread.
                    QMetaObject::invokeMethod(this, "pipelinePlaying", Qt::QueuedConnection);
                    break;
                default:
                    break;
                }
            }
            break;
        }

    default:
        break;
    }
}
/* Build and start wiring the client-side video receive pipeline:
 *   udpsrc(RTP) / udpsrc(RTCP) -> gstrtpbin -> rtpvp8depay -> queue ->
 *   vp8dec -> xvimagesink, plus a udpsink for outgoing RTCP.
 * rtp_src / rtcp_src are the local receive ports, rtcp_sink the peer's
 * RTCP port.  Returns 0; aborts via g_assert on element failure. */
gint client_video_stream(int rtp_src,int rtcp_src,int rtcp_sink)
{
  GstElement *rtpbin;
  GstElement *rtpvsrc, *rtcpvsrc, *rtcpvsink;
  GstElement *videodec, *videosink;
  GstElement *queue;

  GstCaps *caps;
  gboolean res;
  GstPadLinkReturn lres;
  GstPad *srcpad, *sinkpad;

  RTP_SRC_V = rtp_src;
  RTCP_SRC_V = rtcp_src;
  RTCP_SINK_V = rtcp_sink;

  pipelineVC = gst_pipeline_new ("Client");
  g_assert (pipelineVC);

  /* RTP receiver, with fixed caps so rtpbin knows the payload format. */
  rtpvsrc = gst_element_factory_make ("udpsrc", "rtpvsrc");
  g_assert (rtpvsrc);
  g_object_set (rtpvsrc, "port", RTP_SRC_V, NULL);
  caps = gst_caps_from_string (VIDEO_CAPS);
  g_object_set (rtpvsrc, "caps", caps, NULL);
  gst_caps_unref (caps);

  rtcpvsrc = gst_element_factory_make ("udpsrc", "rtcpvsrc");
  g_assert (rtcpvsrc);
  g_object_set (rtcpvsrc, "port", RTCP_SRC_V, NULL);

  /* Outgoing RTCP; async/sync disabled so it never stalls the pipeline. */
  rtcpvsink = gst_element_factory_make ("udpsink", "rtcpvsink");
  g_assert (rtcpvsink);
  g_object_set (rtcpvsink, "port", RTCP_SINK_V, "host", DEST_HOST, NULL);
  g_object_set (rtcpvsink, "async", FALSE, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipelineVC), rtpvsrc, rtcpvsrc, rtcpvsink, NULL);

  /* Video decode chain.  NOTE(review): `videodepay` has no local
   * declaration, so it is presumably a file-scope global — confirm. */
  videodepay = gst_element_factory_make ("rtpvp8depay", "videodepay");
  g_assert (videodepay);
  videodec = gst_element_factory_make ("vp8dec", "videodec");
  g_assert (videodec);
  videosink = gst_element_factory_make ("xvimagesink", "videosink");
  g_assert (videosink);
  g_object_set (videosink, "sync", FALSE, NULL);
  queue = gst_element_factory_make ("queue", "queue");
  g_assert (queue);

  /* add depayloading and playback to the pipelineVC and link */
  gst_bin_add_many (GST_BIN (pipelineVC), videodepay, queue, videodec, videosink, NULL);

  res = gst_element_link_many (videodepay, queue, videodec, videosink, NULL);
  g_assert (res == TRUE);

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
  g_assert (rtpbin);
  g_object_set (rtpbin, "latency", 10, NULL);

  gst_bin_add (GST_BIN (pipelineVC), rtpbin);

  /* RTP sinkpad in session 1 */
  srcpad = gst_element_get_static_pad (rtpvsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_1");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  /* BUG FIX: this reference was leaked in the original (the RTCP block
   * below unrefs both pads, showing the intended pattern). */
  gst_object_unref (sinkpad);

  /* RTCP sinkpad in session 1 */
  srcpad = gst_element_get_static_pad (rtcpvsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_1");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* RTCP srcpad for sending RTCP */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_1");
  sinkpad = gst_element_get_static_pad (rtcpvsink, "sink");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);
  /* BUG FIX: this reference was leaked in the original.  NOTE(review):
   * request pads from rtpbin would strictly also need
   * gst_element_release_request_pad() at teardown — confirm. */
  gst_object_unref (srcpad);

  /* Directing xvideo to Drawing Area */
  if (GST_IS_X_OVERLAY (videosink))
  {
    gst_x_overlay_set_window_handle (GST_X_OVERLAY (videosink),
        GPOINTER_TO_UINT (GINT_TO_POINTER (GDK_WINDOW_XWINDOW (remote_video->window))));
  }

  /* Dynamic pads (the actual RTP stream) are linked as they appear. */
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), NULL);

  g_print ("starting receiver pipelineVC\n");

  return 0;
}