Example #1
static void
test_audio_sendrecv (const gchar * audio_enc_name,
    GstStaticCaps expected_caps, gchar * codec)
{
  GArray *codecs_array;
  gchar *codecs[] = { codec, NULL };
  HandOffData *hod;
  GMainLoop *loop = g_main_loop_new (NULL, TRUE);
  GstSDPMessage *offer, *answer;
  GstElement *pipeline = gst_pipeline_new (NULL);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  GstElement *audiotestsrc_offerer =
      gst_element_factory_make ("audiotestsrc", NULL);
  GstElement *audiotestsrc_answerer =
      gst_element_factory_make ("audiotestsrc", NULL);
  GstElement *audio_enc_offerer =
      gst_element_factory_make (audio_enc_name, NULL);
  GstElement *audio_enc_answerer =
      gst_element_factory_make (audio_enc_name, NULL);
  GstElement *rtpendpoint_offerer =
      gst_element_factory_make ("rtpendpoint", NULL);
  GstElement *rtpendpoint_answerer =
      gst_element_factory_make ("rtpendpoint", NULL);
  GstElement *fakesink_offerer = gst_element_factory_make ("fakesink", NULL);
  GstElement *fakesink_answerer = gst_element_factory_make ("fakesink", NULL);

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  codecs_array = create_codecs_array (codecs);
  g_object_set (rtpendpoint_offerer, "num-audio-medias", 1, "audio-codecs",
      g_array_ref (codecs_array), NULL);
  g_object_set (rtpendpoint_answerer, "num-audio-medias", 1, "audio-codecs",
      g_array_ref (codecs_array), NULL);
  g_array_unref (codecs_array);

  hod = g_slice_new (HandOffData);
  hod->expected_caps = expected_caps;
  hod->loop = loop;

  g_object_set (G_OBJECT (fakesink_offerer), "signal-handoffs", TRUE, NULL);
  g_signal_connect (G_OBJECT (fakesink_offerer), "handoff",
      G_CALLBACK (sendrecv_offerer_fakesink_hand_off), hod);
  g_object_set (G_OBJECT (fakesink_answerer), "signal-handoffs", TRUE, NULL);
  g_signal_connect (G_OBJECT (fakesink_answerer), "handoff",
      G_CALLBACK (sendrecv_answerer_fakesink_hand_off), hod);

  /* Add elements */
  gst_bin_add (GST_BIN (pipeline), rtpendpoint_offerer);
  connect_sink_async (rtpendpoint_offerer, audiotestsrc_offerer,
      audio_enc_offerer, NULL, pipeline, "sink_audio");

  gst_bin_add (GST_BIN (pipeline), rtpendpoint_answerer);
  connect_sink_async (rtpendpoint_answerer, audiotestsrc_answerer,
      audio_enc_answerer, NULL, pipeline, "sink_audio");

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* SDP negotiation */
  mark_point ();
  g_signal_emit_by_name (rtpendpoint_offerer, "generate-offer", &offer);
  fail_unless (offer != NULL);

  mark_point ();
  g_signal_emit_by_name (rtpendpoint_answerer, "process-offer", offer, &answer);
  fail_unless (answer != NULL);

  mark_point ();
  g_signal_emit_by_name (rtpendpoint_offerer, "process-answer", answer);
  gst_sdp_message_free (offer);
  gst_sdp_message_free (answer);

  gst_bin_add (GST_BIN (pipeline), fakesink_offerer);
  g_object_set_data (G_OBJECT (rtpendpoint_offerer), AUDIO_SINK,
      fakesink_offerer);
  g_signal_connect (rtpendpoint_offerer, "pad-added",
      G_CALLBACK (connect_sink_on_srcpad_added), NULL);
  fail_unless (kms_element_request_srcpad (rtpendpoint_offerer,
          KMS_ELEMENT_PAD_TYPE_AUDIO));

  gst_bin_add (GST_BIN (pipeline), fakesink_answerer);
  g_object_set_data (G_OBJECT (rtpendpoint_answerer), AUDIO_SINK,
      fakesink_answerer);
  g_signal_connect (rtpendpoint_answerer, "pad-added",
      G_CALLBACK (connect_sink_on_srcpad_added), NULL);
  fail_unless (kms_element_request_srcpad (rtpendpoint_answerer,
          KMS_ELEMENT_PAD_TYPE_AUDIO));

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "test_audio_sendrecv_before_entering_loop");

  mark_point ();
  g_main_loop_run (loop);
  mark_point ();

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "test_audio_sendrecv_end");

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_main_loop_unref (loop);
  g_object_unref (pipeline);
  g_slice_free (HandOffData, hod);
}
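
The HandOffData struct, bus_msg and the two *_fakesink_hand_off callbacks used above are defined elsewhere in the test suite. As a rough sketch, and only as an assumption about their shape, a handoff callback of this kind could simply check the negotiated caps against the expected ones and quit the main loop:

/* Hypothetical sketch; the actual Kurento helpers may differ. */
typedef struct _HandOffData
{
  GstStaticCaps expected_caps;
  GMainLoop *loop;
} HandOffData;

static gboolean
quit_main_loop_idle (gpointer data)
{
  g_main_loop_quit ((GMainLoop *) data);
  return G_SOURCE_REMOVE;
}

static void
sendrecv_offerer_fakesink_hand_off (GstElement * fakesink, GstBuffer * buf,
    GstPad * pad, gpointer user_data)
{
  HandOffData *hod = (HandOffData *) user_data;
  GstCaps *expected = gst_static_caps_get (&hod->expected_caps);
  GstCaps *current = gst_pad_get_current_caps (pad);

  /* Quit once media with the expected caps reaches the sink */
  if (current != NULL && gst_caps_can_intersect (current, expected)) {
    g_object_set (fakesink, "signal-handoffs", FALSE, NULL);
    g_idle_add (quit_main_loop_idle, hod->loop);
  }

  if (current != NULL)
    gst_caps_unref (current);
  gst_caps_unref (expected);
}
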
Example #2
int main (int argc, char **argv)
{
  SDL_SysWMinfo info;
  Display *gtkglext_display = NULL;
  Window gtkglext_window = 0;
  GLXContext gtkglext_gl_context = NULL;
  
  GstPipeline *pipeline = NULL;
  GstBus *bus = NULL;
  GstElement *glfilter = NULL;
  GstElement *fakesink = NULL;
  GstState state;
  GAsyncQueue *queue_input_buf = NULL;
  GAsyncQueue *queue_output_buf = NULL;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);
  gtk_gl_init(&argc, &argv);
    
  gint major; 
  gint minor;
  gdk_gl_query_version(&major, &minor);
  g_print("\nOpenGL extension version - %d.%d\n", major, minor);
  /* Try double-buffered visual */

  GdkGLConfig* glconfig;
  // the line below does not compile as C++ without the explicit cast.
  glconfig = gdk_gl_config_new_by_mode(static_cast<GdkGLConfigMode>(GDK_GL_MODE_RGB | GDK_GL_MODE_DOUBLE));
  if (glconfig == NULL)
  {
      g_print("*** Cannot find the double-buffered visual.\n");
      g_print("*** Trying single-buffered visual.\n");
      /* Try single-buffered visual */
      glconfig = gdk_gl_config_new_by_mode(static_cast<GdkGLConfigMode>(GDK_GL_MODE_RGB));
      if (glconfig == NULL)
      {
          g_print ("*** No appropriate OpenGL-capable visual found.\n");
          exit(1);
      }
  }
  examine_gl_config_attrib(glconfig);

  // Main GTK window
  GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
  gtk_widget_set_size_request(window, 640, 480);
  gtk_window_set_title(GTK_WINDOW (window), "Toonloop 1.3 experimental");
  GdkGeometry geometry;
  geometry.min_width = 1;
  geometry.min_height = 1;
  geometry.max_width = -1;
  geometry.max_height = -1;
  gtk_window_set_geometry_hints(GTK_WINDOW(window), window, &geometry, GDK_HINT_MIN_SIZE);
  g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(on_delete_event), NULL);

  //area where the video is drawn
  GtkWidget* drawing_area = gtk_drawing_area_new();
  gtk_container_add(GTK_CONTAINER(window), drawing_area);

  /* Set OpenGL-capability to the widget. */
  gtk_widget_set_gl_capability(drawing_area, glconfig, NULL, TRUE, GDK_GL_RGBA_TYPE);

  /* Loop, drawing and checking events */
  g_signal_connect_after(G_OBJECT(drawing_area), "realize", G_CALLBACK(on_realize), NULL);
  g_signal_connect(G_OBJECT(drawing_area), "configure_event", G_CALLBACK(on_configure_event), NULL);
  g_signal_connect(G_OBJECT(drawing_area), "expose_event", G_CALLBACK(on_expose_event), NULL);

  gtk_widget_show_all(window);

 // ------------------ done with the GTK GUI


  /* retrieve and turn off gtkglext opengl context */
  SDL_VERSION (&info.version);
  SDL_GetWMInfo (&info);
  gtkglext_display = info.info.x11.display;
  gtkglext_window = info.info.x11.window;
  gtkglext_gl_context = glXGetCurrentContext ();
  glXMakeCurrent (gtkglext_display, None, 0);

  pipeline =
      GST_PIPELINE (gst_parse_launch
      ("videotestsrc ! video/x-raw-yuv, width=320, height=240, framerate=(fraction)30/1 ! "
          "glupload ! gleffects effect=5 ! fakesink sync=1", NULL));

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), NULL);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), NULL);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), NULL);
  gst_object_unref (bus);

  /* gtkglext_gl_context is an external OpenGL context with which gst-plugins-gl wants to share textures */
  glfilter = gst_bin_get_by_name (GST_BIN (pipeline), "gleffects0");
  g_object_set (G_OBJECT (glfilter), "external-opengl-context",
      gtkglext_gl_context, NULL);
  g_object_unref (glfilter);

  /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and
   * shared with the gtkglext one */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
  state = GST_STATE_PAUSED;
  if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL,
          GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
    g_debug ("failed to pause pipeline\n");
    return -1;
  }

  /* make the gtkglext OpenGL context current again */
  glXMakeCurrent (gtkglext_display, gtkglext_window, gtkglext_gl_context);

  /* append a gst-gl texture to this queue when you no longer need it */
  fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0");
  g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL);
  g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL);
  queue_input_buf = g_async_queue_new ();
  queue_output_buf = g_async_queue_new ();
  g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf);
  g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf);
  g_object_unref (fakesink);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  
  gtk_main();

  /* before deinitializing the gst-gl OpenGL context,
   * no shared context (here the gtkglext one) must be current
   */
  glXMakeCurrent (gtkglext_display, gtkglext_window, gtkglext_gl_context);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  g_object_unref (pipeline);

  /* release the gtkglext OpenGL context */
  glXMakeCurrent (gtkglext_display, None, 0);

  /* make sure there are no pending gst-gl buffers in the communication queues
   * between gtkglext and gst-gl
   */
  while (g_async_queue_length (queue_input_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf);
    gst_buffer_unref (buf);
  }

  while (g_async_queue_length (queue_output_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf);
    gst_buffer_unref (buf);
  }

  return 0;
}
Example #3
Recording*
recording_start(const char* filename)
{
	GMAudioProfile *profile;
	GstElement *pipeline, *oss_src, *encoder, *filesink;
	pipeline = oss_src = encoder = filesink = NULL;
	
	profile = gm_audio_profile_lookup(rec_settings.profile);
	g_assert(profile);
	
	pipeline = gst_pipeline_new("gnomeradio-record-pipeline");
	if (!pipeline) {
		show_error_message(_("Could not create GStreamer pipeline."),
		_("Check your Gstreamer installation!"));
		goto error;
	}		
	
	oss_src = gst_element_factory_make("osssrc", "oss-source");
	if (!oss_src) {
		show_error_message(_("Could not open Gstreamer OSS Source."),
		_("Verify your Gstreamer OSS subsystem installation!"));
		goto error;
	}
	
	GstBus *bus = gst_element_get_bus(pipeline);
	gst_bus_add_signal_watch(bus);
	g_signal_connect(G_OBJECT(bus), "message::error", G_CALLBACK(error_cb), pipeline);

	char* pipeline_str = g_strdup_printf("audioconvert ! %s", gm_audio_profile_get_pipeline(profile));
	encoder = my_gst_gconf_render_bin_from_description(pipeline_str);
	g_free(pipeline_str);
	if (!encoder) {
		char *caption = g_strdup_printf(_("Could not create encoder \"%s\"."), gm_audio_profile_get_name (profile));
		show_error_message(caption,
		_("Verify your Gstreamer plugins installation!"));
		g_free(caption);
		goto error;
	}
	
	/* Write to disk */
	filesink = gst_element_factory_make("filesink", "file-sink");
	if (!filesink) {	
		show_error_message(_("Could not create Gstreamer filesink."),
		_("Check your Gstreamer installation!"));
		goto error;
	}
	
	/* Add the elements to the pipeline */
	gst_bin_add_many(GST_BIN(pipeline), oss_src, encoder, filesink, NULL);
	
	/* Link it all together */
	if (!gst_element_link_many(oss_src, encoder, filesink, NULL)) {
		g_warning("Could not link elements. This is bad!\n");
		goto error;
	}
	char* path = g_strdup_printf("%s.%s", filename, gm_audio_profile_get_extension(profile));	
	g_object_set(G_OBJECT(filesink), "location", path, NULL);
	
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	
	Recording *recording = g_malloc0(sizeof(Recording));
	recording->filename = path;
	recording->pipeline = pipeline;
	
	return recording;
	
error:
	if (pipeline)
		gst_object_unref(GST_OBJECT(pipeline));
	if (oss_src)
		gst_object_unref(GST_OBJECT(oss_src));
	if (encoder)
		gst_object_unref(GST_OBJECT(encoder));
	if (filesink)
		gst_object_unref(GST_OBJECT(filesink));
	
	return NULL;
}		
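
The Recording type is only used here through its filename and pipeline members. Assuming the caller owns the returned structure, a matching definition and a companion stop routine might look like this (a sketch, not gnomeradio's actual code):

typedef struct {
	char *filename;
	GstElement *pipeline;
} Recording;

void
recording_stop(Recording *recording)
{
	if (recording == NULL)
		return;
	/* stop the pipeline and release everything recording_start() allocated */
	gst_element_set_state(recording->pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(recording->pipeline));
	g_free(recording->filename);
	g_free(recording);
}
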
Example #4
gint main (gint argc, gchar *argv[])
{
    GtkWidget *area;
    gst_init (&argc, &argv);
    gtk_init (&argc, &argv);

    GstElement* pipeline = gst_pipeline_new ("pipeline");
    GstElement* videosrc  = gst_element_factory_make ("videotestsrc", "videotestsrc");
    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");

    gst_bin_add_many (GST_BIN (pipeline), videosrc, videosink, NULL);

    gboolean link_ok = gst_element_link_many(videosrc, videosink, NULL) ;
    if(!link_ok)
    {
        g_warning("Failed to link an element!\n") ;
        return -1;
    }

    //set window id on this event
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_signal_watch (bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);

    gst_element_set_state(pipeline, GST_STATE_READY);

    area = gtk_drawing_area_new();
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area, NULL);
    gst_object_unref (bus);

    //window that contains an area where the video is drawn
    GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_widget_set_size_request (window, 640, 480);
    gtk_window_move (GTK_WINDOW (window), 300, 10);
    gtk_window_set_title (GTK_WINDOW (window), "glimagesink implement the gstvideooverlay interface");
    GdkGeometry geometry;
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE);

    //window to control the states
    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);
    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
    gtk_window_move (GTK_WINDOW (window_control), 10, 10);
    GtkWidget* table = gtk_grid_new ();
    gtk_container_add (GTK_CONTAINER (window_control), table);

    //control state null
    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
    g_signal_connect (G_OBJECT (button_state_null), "clicked",
        G_CALLBACK (button_state_null_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_null, 0, 0, 1, 1);
    gtk_widget_show (button_state_null);

    //control state ready
    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
    g_signal_connect (G_OBJECT (button_state_ready), "clicked",
        G_CALLBACK (button_state_ready_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_ready, 0, 1, 1, 1);
    gtk_widget_show (button_state_ready);

    //control state paused
    GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
    g_signal_connect (G_OBJECT (button_state_paused), "clicked",
        G_CALLBACK (button_state_paused_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_paused, 0, 2, 1, 1);
    gtk_widget_show (button_state_paused);

    //control state playing
    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
    g_signal_connect (G_OBJECT (button_state_playing), "clicked",
        G_CALLBACK (button_state_playing_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_playing, 0, 3, 1, 1);
    gtk_widget_show (button_state_playing);

    //change framerate
    GtkWidget* slider_fps = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL,
        1, 30, 2);
    g_signal_connect (G_OBJECT (slider_fps), "format-value",
        G_CALLBACK (slider_fps_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), slider_fps, 1, 0, 1, 4);
    gtk_widget_show (slider_fps);

    gtk_widget_show (table);
    gtk_widget_show (window_control);

    //configure the pipeline
    g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);

    //area where the video is drawn
    gtk_container_add (GTK_CONTAINER (window), area);

    gtk_widget_realize(area);

    //needed when being in GST_STATE_READY, GST_STATE_PAUSED
    //or resizing/obscuring the window
    g_signal_connect(area, "draw", G_CALLBACK(draw_cb), videosink);

    gtk_widget_show_all (window);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    gtk_main();

    return 0;
}
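
create_window is installed as a bus sync handler so that glimagesink receives the drawing area's native window handle as soon as it asks for one. A minimal GTK3/X11 version, assuming the usual GstVideoOverlay pattern, could be:

/* Hypothetical sketch of the sync handler passed to gst_bus_set_sync_handler() */
#include <gst/video/videooverlay.h>
#include <gdk/gdkx.h>

static GstBusSyncReply
create_window (GstBus * bus, GstMessage * message, GtkWidget * widget)
{
    if (!gst_is_video_overlay_prepare_window_handle_message (message))
        return GST_BUS_PASS;

    /* the widget was realized before PLAYING, so its GdkWindow already exists */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
        GDK_WINDOW_XID (gtk_widget_get_window (widget)));

    gst_message_unref (message);
    return GST_BUS_DROP;
}
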
Example #5
PocketvoxRecognizer* pocketvox_recognizer_new(gchar* hmm, gchar* lm, gchar* dic, gchar *key, gchar *mat, gchar *dev, gchar* host, gint port)
{
	GstElement *source, *converter, *sampler, *vader, *sphinx;
	GstBus *bus = NULL;

    gchar *material = NULL;
    gchar *device = dev == NULL ? g_strdup("hw:0") : g_strdup(dev);

	if(mat == NULL || g_strcmp0(mat, "Default") != TRUE)
	{
		material = g_strdup("gsettingsaudiosrc");
	}
	else
	{
		if( g_strcmp0(mat, "Alsa") != TRUE)
		{
			material = g_strdup("alsasrc");
		}
		else
		{
            if( g_strcmp0(mat, "Network") != TRUE)
            {
                if( NULL != host)
                {
                    material = g_strdup("tcpserversrc");
                }
                else
                {
                    g_warning("In order to receive order from network you need to give an host and a port");

                    material = g_strdup("gsettingsaudiosrc");
                }
            }
			else
            {
                material = g_strdup("gsettingsaudiosrc");
		    }
        }
	}

	g_return_val_if_fail(g_file_test(hmm, G_FILE_TEST_EXISTS), 	NULL);
	g_return_val_if_fail(g_file_test(lm, G_FILE_TEST_EXISTS), 	NULL);
	g_return_val_if_fail(g_file_test(dic, G_FILE_TEST_EXISTS), 	NULL);

	PocketvoxRecognizer *recognizer = (PocketvoxRecognizer *)g_object_new(TYPE_POCKETVOX_RECOGNIZER,
																		"hmm", 	hmm,
																		"lm", 	lm,
																		"dic",	dic,
																		NULL);

	g_return_val_if_fail(NULL != recognizer, NULL);

	recognizer->priv = G_TYPE_INSTANCE_GET_PRIVATE (recognizer,
		TYPE_POCKETVOX_RECOGNIZER, PocketvoxRecognizerPrivate);
	PocketvoxRecognizerPrivate *priv = recognizer->priv;

	priv->hmm     = g_strdup(hmm);
	priv->lm      = g_strdup(lm);
	priv->dic     = g_strdup(dic);
    priv->keyword = g_strdup(key);

	//build the pipeline auto-threshold=true
    priv->pipeline = gst_pipeline_new("pipeline");
    g_assert(priv->pipeline);

    bus = gst_pipeline_get_bus(GST_PIPELINE(priv->pipeline));
    g_assert(bus);

    source    = gst_element_factory_make(material, "src");
    g_assert(source);

    //set the device name if we use alsasrc input
    if( g_strcmp0(material, "alsasrc") != TRUE )
    {
        g_object_set(G_OBJECT(source),
                      "device", device,
                      NULL);
    }
    else
    {
        if(!g_strcmp0(material, "tcpserversrc"))
        {
            g_object_set(G_OBJECT(source),
                        "host", host,
                        "port", port,
                        NULL);
        }
    }

    converter = gst_element_factory_make("audioconvert",      "convert");
    g_assert(converter);

    sampler   = gst_element_factory_make("audioresample",     "resample");
    g_assert(sampler);

    vader     = gst_element_factory_make("vader",             "vader");
    g_assert(vader);
    g_object_set(G_OBJECT(vader),
                "auto-threshold", TRUE,
                NULL);


    sphinx    = gst_element_factory_make("pocketsphinx",      "psphinx");
    g_assert(sphinx);

    gst_bin_add_many(GST_BIN(priv->pipeline),
                    source,
                    converter,
                    sampler,
                    vader,
                    sphinx,
                    NULL);

    if(!g_strcmp0(material, "tcpserversrc") == FALSE )
    {
        gst_element_link_many(source, converter, sampler, vader, sphinx, NULL);
    }
    else
    {
        GstCaps* caps = gst_caps_new_simple("audio/x-raw-int",
            "endianness", G_TYPE_INT,     1234,
            "signed",     G_TYPE_BOOLEAN, TRUE,
            "width",      G_TYPE_INT,     32,
            "depth",      G_TYPE_INT,     32,
            "rate",       G_TYPE_INT,     44100,
            "channels",   G_TYPE_INT,     2,
            NULL);

        gst_element_link_filtered(source, converter, caps);
        gst_element_link_many(converter, sampler, vader, sphinx, NULL);

        gst_caps_unref(caps);
    }

	//set properties
	g_object_set(G_OBJECT(sphinx),
					"hmm", 	priv->hmm,
					"lm",  	priv->lm,
					"dict", priv->dic,
					NULL);

	//connect to a callback function
	g_signal_connect(sphinx, "result", G_CALLBACK(pocketvox_recognizer_process_result), NULL );

	gst_bus_add_signal_watch(bus);
	g_signal_connect(bus, "message::application", G_CALLBACK(pocketvox_recognizer_parse_bus_message), recognizer);

	//play
	gst_element_set_state(priv->pipeline, GST_STATE_PLAYING);

    if(gst_element_get_state(priv->pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE)
    {
        g_warning("PocketvoxRecognizer: unable to put the pipeline in playing mode");

        return NULL;
    }

    g_free(material);
    g_free(device);

    return recognizer;
}
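
pocketvox_recognizer_process_result is connected to the pocketsphinx "result" signal, and the bus watch above listens for application messages, so the callback presumably republishes the hypothesis on the bus. Assuming the old pocketsphinx element signature (element, hypothesis, utterance id), a sketch could be:

/* Hypothetical sketch; the real pocketvox callback and signal signature may differ */
static void
pocketvox_recognizer_process_result(GstElement *sphinx, gchar *hyp, gchar *uttid,
    gpointer user_data)
{
    /* repost the recognized sentence as an application message on the bus */
    GstStructure *st = gst_structure_new("result",
        "hyp",   G_TYPE_STRING, hyp,
        "uttid", G_TYPE_STRING, uttid,
        NULL);

    gst_element_post_message(sphinx, gst_message_new_application(GST_OBJECT(sphinx), st));
}
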
Example #6
GST_END_TEST
/* Temporarily disabled */
#if 0
GST_START_TEST (test_dtls_send_recv_data)
{
  gchar *cert_key_pem_file;
  GTlsConnection *conn;

  GMainLoop *loop = g_main_loop_new (NULL, TRUE);
  GstElement *pipeline = gst_pipeline_new (__FUNCTION__);
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  GstElement *fakesrc_client = gst_element_factory_make ("fakesrc", NULL);
  GstElement *dtlsenc_client = gst_element_factory_make ("kmsdtlsenc", NULL);
  GstElement *dtlsdec_client = gst_element_factory_make ("kmsdtlsdec", NULL);
  GstElement *fakesink_client = gst_element_factory_make ("fakesink", NULL);

  GstElement *fakesrc_server = gst_element_factory_make ("fakesrc", NULL);
  GstElement *dtlsenc_server = gst_element_factory_make ("kmsdtlsenc", NULL);
  GstElement *dtlsdec_server = gst_element_factory_make ("kmsdtlsdec", NULL);
  GstElement *fakesink_server = gst_element_factory_make ("fakesink", NULL);

  cert_key_pem_file = generate_certkey_pem_file_path ();
  generate_certkey_pem_file (cert_key_pem_file);

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  g_object_set (G_OBJECT (fakesink_client), "signal-handoffs", TRUE, NULL);
  g_signal_connect (G_OBJECT (fakesink_client), "handoff",
      G_CALLBACK (fakesink_dtls_client_hand_off), loop);
  g_object_set (G_OBJECT (fakesink_server), "signal-handoffs", TRUE, NULL);
  g_signal_connect (G_OBJECT (fakesink_server), "handoff",
      G_CALLBACK (fakesink_dtls_server_hand_off), loop);

  g_object_set (G_OBJECT (fakesrc_client), "sizetype", 2, NULL);
  g_object_set (G_OBJECT (fakesrc_server), "sizetype", 2, NULL);

  g_object_set (G_OBJECT (dtlsenc_client), "channel-id", "client-id", NULL);
  g_object_set (G_OBJECT (dtlsenc_client), "is-client", TRUE, NULL);
  g_object_set (G_OBJECT (dtlsdec_client), "channel-id", "client-id", NULL);
  g_object_set (G_OBJECT (dtlsdec_client), "is-client", TRUE, NULL);
  g_object_set (G_OBJECT (dtlsdec_client), "certificate-pem-file",
      cert_key_pem_file, NULL);

  g_object_set (G_OBJECT (dtlsenc_server), "channel-id", "server-id", NULL);
  g_object_set (G_OBJECT (dtlsenc_server), "is-client", FALSE, NULL);
  g_object_set (G_OBJECT (dtlsdec_server), "channel-id", "server-id", NULL);
  g_object_set (G_OBJECT (dtlsdec_server), "is-client", FALSE, NULL);
  g_object_set (G_OBJECT (dtlsdec_server), "certificate-pem-file",
      cert_key_pem_file, NULL);

  mark_point ();
  gst_bin_add_many (GST_BIN (pipeline), fakesrc_client, dtlsdec_client,
      dtlsenc_client, fakesink_client, NULL);
  gst_bin_add_many (GST_BIN (pipeline), fakesrc_server, dtlsdec_server,
      dtlsenc_server, fakesink_server, NULL);
  mark_point ();

  gst_element_link_many (fakesrc_client, dtlsenc_client, dtlsdec_server,
      fakesink_server, NULL);
  gst_element_link_many (fakesrc_server, dtlsenc_server, dtlsdec_client,
      fakesink_client, NULL);
  mark_point ();

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "dtls_before_playing");

  gst_element_set_state (fakesink_server, GST_STATE_PLAYING);
  gst_element_set_state (dtlsdec_server, GST_STATE_PLAYING);
  gst_element_set_state (dtlsenc_client, GST_STATE_PLAYING);
  gst_element_set_state (fakesrc_client, GST_STATE_PLAYING);

  gst_element_set_state (fakesink_client, GST_STATE_PLAYING);
  gst_element_set_state (dtlsdec_client, GST_STATE_PLAYING);
  gst_element_set_state (dtlsenc_server, GST_STATE_PLAYING);
  gst_element_set_state (fakesrc_server, GST_STATE_PLAYING);

  /* Init DTLS handshake over dtlsenc connection */
  g_object_get (dtlsenc_client, "tls-connection", &conn, NULL);
  g_tls_connection_handshake_async (conn, G_PRIORITY_DEFAULT, NULL, NULL, NULL);

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "dtls_before_entering_loop");

  g_main_loop_run (loop);

  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "dtls_end");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_object_unref (pipeline);
  g_main_loop_unref (loop);

  g_remove (cert_key_pem_file);
  g_free (cert_key_pem_file);
}
Example #7
GST_END_TEST
GST_START_TEST (request_data_sink_pad)
{
  gchar *padname = NULL;
  KmsConnectData *data;
  GstBus *bus;

  data = kms_connect_data_create (1);
  data->data_probe = (KmsProbeType) data_probe_cb;
  data->audio_probe = (KmsProbeType) audio_probe_cb;
  data->video_probe = (KmsProbeType) video_probe_cb;

  /* Only tests data */
  data->video_checks = data->audio_checks = 0;

  loop = g_main_loop_new (NULL, TRUE);
  pipeline = gst_pipeline_new (__FUNCTION__);
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  data->src = gst_element_factory_make ("dummysrc", NULL);
  data->sink = gst_element_factory_make ("dummysink", NULL);
  g_signal_connect (data->src, "pad-added", G_CALLBACK (src_pads_added), data);
  g_signal_connect (data->sink, "pad-added",
      G_CALLBACK (sink_pads_added), data);

  g_signal_connect (data->sink, "pad-removed",
      G_CALLBACK (sink_pads_removed), data);

  gst_bin_add_many (GST_BIN (pipeline), data->src, data->sink, NULL);

  /* request src pad using action */
  g_signal_emit_by_name (data->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_DATA, NULL, GST_PAD_SRC, &data->data_src);
  fail_if (data->data_src == NULL);

  GST_DEBUG ("Data pad name: %s", data->data_src);

  /* request sink pad using action */
  g_signal_emit_by_name (data->sink, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_DATA, "test", GST_PAD_SINK, &padname);
  fail_if (padname == NULL);

  GST_DEBUG ("Data pad name: %s", padname);

  g_object_set (G_OBJECT (data->src), "data", TRUE, NULL);

  g_timeout_add_seconds (4, print_timedout_pipeline, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_object_unref (pipeline);
  g_free (padname);
  g_main_loop_unref (loop);
  kms_connect_data_destroy (data);
}
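
print_timedout_pipeline runs if the test is still going after four seconds; it plausibly just dumps the pipeline graph for debugging. A sketch, assuming the file-scope pipeline variable this test uses:

/* Hypothetical sketch of the timeout callback */
static gboolean
print_timedout_pipeline (gpointer data)
{
  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
      GST_DEBUG_GRAPH_SHOW_ALL, "request_data_sink_pad_timeout");
  return G_SOURCE_REMOVE;
}
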
Example #8
int
main (int argc, char **argv)
{

#ifdef WIN32
  HGLRC sdl_gl_context = 0;
  HDC sdl_dc = 0;
#else
  SDL_SysWMinfo info;
  Display *sdl_display = NULL;
  Window sdl_win = 0;
  GLXContext sdl_gl_context = NULL;
#endif

  GMainLoop *loop = NULL;
  GstPipeline *pipeline = NULL;
  GstBus *bus = NULL;
  GstElement *glfilter = NULL;
  GstElement *fakesink = NULL;
  GstState state;
  GAsyncQueue *queue_input_buf = NULL;
  GAsyncQueue *queue_output_buf = NULL;
  GstGLDisplay *display;
  GstGLContext *sdl_context;
  const gchar *platform;

  /* Initialize SDL for video output */
  if (SDL_Init (SDL_INIT_VIDEO) < 0) {
    fprintf (stderr, "Unable to initialize SDL: %s\n", SDL_GetError ());
    return -1;
  }

  /* Create a 640x480 OpenGL screen */
  if (SDL_SetVideoMode (640, 480, 0, SDL_OPENGL) == NULL) {
    fprintf (stderr, "Unable to create OpenGL screen: %s\n", SDL_GetError ());
    SDL_Quit ();
    return -1;
  }

  /* Set the title bar in environments that support it */
  SDL_WM_SetCaption ("SDL and gst-plugins-gl", NULL);


  /* Loop, drawing and checking events */
  InitGL (640, 480);

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* retrieve and turn off sdl opengl context */
#ifdef WIN32
  sdl_gl_context = wglGetCurrentContext ();
  sdl_dc = wglGetCurrentDC ();
  wglMakeCurrent (0, 0);
  platform = "wgl";
  display = gst_gl_display_new ();
#else
  SDL_VERSION (&info.version);
  SDL_GetWMInfo (&info);
  /* FIXME: This display is different to the one that SDL uses to create the
   * GL context inside SDL_SetVideoMode() above which fails on Intel hardware
   */
  sdl_display = info.info.x11.display;
  sdl_win = info.info.x11.window;
  sdl_gl_context = glXGetCurrentContext ();
  glXMakeCurrent (sdl_display, None, 0);
  platform = "glx";
  display = (GstGLDisplay *) gst_gl_display_x11_new_with_display (sdl_display);
#endif

  sdl_context = gst_gl_context_new_wrapped (display, (guintptr) sdl_gl_context,
      gst_gl_platform_from_string (platform), GST_GL_API_OPENGL);

  pipeline =
      GST_PIPELINE (gst_parse_launch
      ("videotestsrc ! video/x-raw, width=320, height=240, framerate=(fraction)30/1 ! "
          "gleffects effect=5 ! fakesink sync=1", NULL));

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), loop);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), loop);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), loop);
  gst_object_unref (bus);

  /* sdl_gl_context is an external OpenGL context with which gst-plugins-gl wants to share textures */
  glfilter = gst_bin_get_by_name (GST_BIN (pipeline), "gleffects0");
  g_object_set (G_OBJECT (glfilter), "other-context", sdl_context, NULL);
  gst_object_unref (glfilter);

  /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and
   * shared with the sdl one */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
  state = GST_STATE_PAUSED;
  if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL,
          GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
    g_debug ("failed to pause pipeline\n");
    return -1;
  }

  /* make the sdl opengl context current again */
#ifdef WIN32
  wglMakeCurrent (sdl_dc, sdl_gl_context);
#else
  glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context);
#endif

  /* append a gst-gl texture to this queue when you no longer need it */
  fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0");
  g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL);
  g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL);
  queue_input_buf = g_async_queue_new ();
  queue_output_buf = g_async_queue_new ();
  g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf);
  g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf);
  g_object_set_data (G_OBJECT (fakesink), "loop", loop);
  gst_object_unref (fakesink);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  g_main_loop_run (loop);

  /* before deinitializing the gst-gl OpenGL context,
   * no shared context (here the sdl one) must be current
   */
#ifdef WIN32
  wglMakeCurrent (0, 0);
#else
  glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context);
#endif

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* turn on back sdl opengl context */
#ifdef WIN32
  wglMakeCurrent (sdl_dc, sdl_gl_context);
#else
  glXMakeCurrent (sdl_display, None, 0);
#endif

  SDL_Quit ();

  /* make sure there are no pending gst-gl buffers in the communication queues
   * between sdl and gst-gl
   */
  while (g_async_queue_length (queue_input_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf);
    gst_buffer_unref (buf);
  }

  while (g_async_queue_length (queue_output_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf);
    gst_buffer_unref (buf);
  }

  return 0;
}
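
The on_gst_buffer handoff callback is not shown. Assuming it only hands new buffers to the SDL drawing code and recycles the ones already displayed, a minimal version might be:

/* Hypothetical sketch of the fakesink handoff callback */
static void
on_gst_buffer (GstElement * fakesink, GstBuffer * buf, GstPad * pad,
    gpointer data)
{
  GAsyncQueue *in_q = (GAsyncQueue *)
      g_object_get_data (G_OBJECT (fakesink), "queue_input_buf");
  GAsyncQueue *out_q = (GAsyncQueue *)
      g_object_get_data (G_OBJECT (fakesink), "queue_output_buf");

  /* keep the buffer alive and hand it over to the drawing side */
  gst_buffer_ref (buf);
  g_async_queue_push (in_q, buf);

  /* release buffers the drawing side has finished with */
  while (g_async_queue_length (out_q) > 0)
    gst_buffer_unref ((GstBuffer *) g_async_queue_pop (out_q));
}
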
Example #9
/* compare output with ffmpegcolorspace */
static void
colorspace_compare (gint width, gint height, gboolean comp)
{
  GstBus *bus;
  GstElement *pipeline, *src, *filter1, *filter2, *csp, *fcsp, *fakesink;
  GstElement *queue1, *queue2, *tee, *compare;
  GstCaps *caps, *tcaps, *rcaps, *fcaps;
  GstCaps *ccaps;
  GstPad *pad;

  gint i, j;

  /* create elements */
  pipeline = gst_pipeline_new ("pipeline");
  src = gst_element_factory_make ("videotestsrc", "videotestsrc");
  fail_unless (src != NULL);
  filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
  fail_unless (filter1 != NULL);
  csp = gst_element_factory_make ("colorspace", "colorspace");
  fail_unless (csp != NULL);
  filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  fail_unless (filter2 != NULL);

  if (comp) {
    fcsp = gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace");
    fail_unless (fcsp != NULL);
    tee = gst_element_factory_make ("tee", "tee");
    fail_unless (tee != NULL);
    queue1 = gst_element_factory_make ("queue", "queue1");
    fail_unless (queue1 != NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    fail_unless (queue2 != NULL);
    compare = gst_element_factory_make ("compare", "compare");
    fail_unless (compare != NULL);
  } else {
    fcsp = tee = queue1 = queue2 = compare = NULL;
  }

  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (fakesink != NULL);

  /* add and link */
  gst_bin_add_many (GST_BIN (pipeline), src, filter1, filter2, csp, fakesink,
      tee, queue1, queue2, fcsp, compare, NULL);

  fail_unless (gst_element_link (src, filter1));

  if (comp) {
    fail_unless (gst_element_link (filter1, tee));

    fail_unless (gst_element_link (tee, queue1));
    fail_unless (gst_element_link (queue1, fcsp));
    fail_unless (gst_element_link_pads (fcsp, NULL, compare, "sink"));

    fail_unless (gst_element_link (tee, queue2));
    fail_unless (gst_element_link (queue2, csp));
    fail_unless (gst_element_link_pads (csp, NULL, compare, "check"));

    fail_unless (gst_element_link (compare, filter2));
  } else {
    fail_unless (gst_element_link (filter1, csp));
    fail_unless (gst_element_link (csp, filter2));
  }
  fail_unless (gst_element_link (filter2, fakesink));

  /* obtain possible caps combinations */
  if (comp) {
    pad = gst_element_get_static_pad (fcsp, "sink");
    fail_unless (pad != NULL);
    ccaps = gst_pad_get_pad_template_caps (pad);
    fail_unless (ccaps != NULL);
    fcaps = ccaps;
    gst_object_unref (pad);
  } else {
    fcaps = gst_caps_new_any ();
  }

  pad = gst_element_get_static_pad (csp, "sink");
  fail_unless (pad != NULL);
  ccaps = gst_pad_get_pad_template_caps (pad);
  fail_unless (ccaps != NULL);
  gst_object_unref (pad);

  /* handle videotestsrc limitations */
  pad = gst_element_get_static_pad (src, "src");
  fail_unless (pad != NULL);
  caps = (GstCaps *) gst_pad_get_pad_template_caps (pad);
  fail_unless (caps != NULL);
  gst_object_unref (pad);

  rcaps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, 25, 1,
      "color-matrix", G_TYPE_STRING, "sdtv",
      "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
  gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION, 25, 1,
          "depth", G_TYPE_INT, 32, NULL));

  /* FIXME also allow x-raw-gray if/when colorspace actually handles those */

  /* limit to supported compare types */
  if (comp) {
    gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 25, 1,
            "depth", G_TYPE_INT, 24, NULL));
  }

  tcaps = gst_caps_intersect (fcaps, ccaps);
  gst_caps_unref (fcaps);
  gst_caps_unref (ccaps);
  caps = gst_caps_intersect (tcaps, caps);
  gst_caps_unref (tcaps);
  tcaps = caps;
  caps = gst_caps_intersect (tcaps, rcaps);
  gst_caps_unref (tcaps);
  gst_caps_unref (rcaps);

  /* normalize to finally have a list of acceptable fixed formats */
  caps = gst_caps_simplify (caps);
  caps = gst_caps_normalize (caps);

  /* set up for running stuff */
  loop = g_main_loop_new (NULL, FALSE);
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::eos", (GCallback) message_cb, NULL);
  gst_object_unref (bus);

  g_object_set (src, "num-buffers", 5, NULL);
  if (comp) {
    /* set lower bound for ssim comparison, and allow slightly different caps */
    g_object_set (compare, "method", 2, NULL);
    g_object_set (compare, "meta", 3, NULL);
    g_object_set (compare, "threshold", 0.90, NULL);
    g_object_set (compare, "upper", FALSE, NULL);
  }

  GST_INFO ("possible caps to check %d", gst_caps_get_size (caps));

  /* loop over all input and output combinations */
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    for (j = 0; j < gst_caps_get_size (caps); j++) {
      GstCaps *in_caps, *out_caps;
      GstStructure *s;
      const gchar *fourcc;

      in_caps = gst_caps_copy_nth (caps, i);
      out_caps = gst_caps_copy_nth (caps, j);

      /* FIXME remove if videotestsrc and video format handle these properly */
      s = gst_caps_get_structure (in_caps, 0);
      if ((fourcc = gst_structure_get_string (s, "format"))) {
        if (!strcmp (fourcc, "YUV9") ||
            !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
          gst_caps_unref (in_caps);
          gst_caps_unref (out_caps);
          continue;
        }
      }

      GST_INFO ("checking conversion from %" GST_PTR_FORMAT " (%d)"
          " to %" GST_PTR_FORMAT " (%d)", in_caps, i, out_caps, j);

      g_object_set (filter1, "caps", in_caps, NULL);
      g_object_set (filter2, "caps", out_caps, NULL);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING)
          != GST_STATE_CHANGE_FAILURE);

      g_main_loop_run (loop);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_NULL)
          == GST_STATE_CHANGE_SUCCESS);

      gst_caps_unref (in_caps);
      gst_caps_unref (out_caps);
    }
  }

  gst_caps_unref (caps);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
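
message_cb only needs to stop the iteration once EOS arrives so that colorspace_compare() can move on to the next caps pair. A sketch, assuming loop is the file-scope GMainLoop created above:

/* Hypothetical sketch of the EOS handler connected to "message::eos" */
static void
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  g_main_loop_quit (loop);
}
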
Example #10
gint main (gint argc, gchar *argv[])
{
    gtk_init (&argc, &argv);
    gst_init (&argc, &argv);

    GstElement* pipeline = gst_pipeline_new ("pipeline");

    //window that contains an area where the video is drawn
    GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_widget_set_size_request (window, 640, 480);
    gtk_window_move (GTK_WINDOW (window), 300, 10);
    gtk_window_set_title (GTK_WINDOW (window), "glimagesink implement the gstxoverlay interface");
    GdkGeometry geometry;
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE);

    //window to control the states
    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);
    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
    gtk_window_move (GTK_WINDOW (window_control), 10, 10);
    GtkWidget* table = gtk_table_new (2, 1, TRUE);
    gtk_container_add (GTK_CONTAINER (window_control), table);

    //control state null
    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
    g_signal_connect (G_OBJECT (button_state_null), "clicked",
        G_CALLBACK (button_state_null_cb), pipeline);
    gtk_table_attach_defaults (GTK_TABLE (table), button_state_null, 0, 1, 0, 1);
    gtk_widget_show (button_state_null);

    //control state ready
    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
    g_signal_connect (G_OBJECT (button_state_ready), "clicked",
        G_CALLBACK (button_state_ready_cb), pipeline);
    gtk_table_attach_defaults (GTK_TABLE (table), button_state_ready, 0, 1, 1, 2);
    gtk_widget_show (button_state_ready);

    //control state paused
    GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
    g_signal_connect (G_OBJECT (button_state_paused), "clicked",
        G_CALLBACK (button_state_paused_cb), pipeline);
    gtk_table_attach_defaults (GTK_TABLE (table), button_state_paused, 0, 1, 2, 3);
    gtk_widget_show (button_state_paused);

    //control state playing
    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
    g_signal_connect (G_OBJECT (button_state_playing), "clicked",
        G_CALLBACK (button_state_playing_cb), pipeline);
    gtk_table_attach_defaults (GTK_TABLE (table), button_state_playing, 0, 1, 3, 4);
    gtk_widget_show (button_state_playing);

    //change framerate
    GtkWidget* slider_fps = gtk_vscale_new_with_range (1, 30, 2);
    g_signal_connect (G_OBJECT (slider_fps), "format-value",
        G_CALLBACK (slider_fps_cb), pipeline);
    gtk_table_attach_defaults (GTK_TABLE (table), slider_fps, 1, 2, 0, 4);
    gtk_widget_show (slider_fps);

    gtk_widget_show (table);
    gtk_widget_show (window_control);

    //configure the pipeline
    g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);

    GstElement* videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
    GstElement* glupload = gst_element_factory_make ("glupload", "glupload");
    GstElement* glfiltercube = gst_element_factory_make ("glfiltercube", "glfiltercube");
    GstElement* glfilterlaplacian = gst_element_factory_make ("glfilterlaplacian", "glfilterlaplacian");
    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");

    GstCaps *caps = gst_caps_new_simple("video/x-raw-yuv",
                                        "width", G_TYPE_INT, 640,
                                        "height", G_TYPE_INT, 480,
                                        "framerate", GST_TYPE_FRACTION, 25, 1,
                                        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'),
                                        NULL) ;

    gst_bin_add_many (GST_BIN (pipeline), videosrc, glupload, glfiltercube, glfilterlaplacian, videosink, NULL);

    gboolean link_ok = gst_element_link_filtered(videosrc, glupload, caps) ;
    gst_caps_unref(caps) ;
    if(!link_ok)
    {
        g_warning("Failed to link videosrc to glupload!\n") ;
        return -1;
    }

    if(!gst_element_link_many(glupload, glfiltercube, glfilterlaplacian, videosink, NULL))
    {
        g_warning("Failed to link glupload to videosink!\n") ;
        return -1;
    }

    //area where the video is drawn
    GtkWidget* area = gtk_drawing_area_new();
    gtk_container_add (GTK_CONTAINER (window), area);

    //set window id on this event
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area);
    gst_bus_add_signal_watch (bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);
    gst_object_unref (bus);

    //needed when being in GST_STATE_READY, GST_STATE_PAUSED
    //or resizing/obscuring the window
    g_signal_connect(area, "expose-event", G_CALLBACK(expose_cb), videosink);

    g_signal_connect (area, "realize", G_CALLBACK (area_realize_cb), pipeline);

    //start
    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print ("Failed to start up pipeline!\n");
        return -1;
    }

    gtk_widget_show_all (window);

    gtk_main();

    return 0;
}
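
expose_cb just asks the overlay sink to repaint itself whenever GTK redraws the drawing area. With the GStreamer 0.10 GstXOverlay API this example targets, a minimal sketch is:

/* Hypothetical sketch; requires gst/interfaces/xoverlay.h (GStreamer 0.10) */
static gboolean
expose_cb (GtkWidget * widget, GdkEventExpose * event, GstElement * videosink)
{
    gst_x_overlay_expose (GST_X_OVERLAY (videosink));
    return FALSE;
}
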
Example #11
void  _receive_video_init_gstreamer(NiceAgent *magent, guint stream_id, CustomData *data)
{
  GstElement *pipeline, *source, *capsfilter, *videoconvert, *h263p, *rtph263pdepay, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  GSource *bus_source;

  GST_INFO ("Pipeline initialization");
  // TODO: figure out showing video

  source = gst_element_factory_make ("udpsrc", "source");
  //videoconvert = gst_element_factory_make ("videoconvert", "convert");
  //capsfilter = gst_element_factory_make ("capsfilter", "caps");
  rtph263pdepay = gst_element_factory_make ("rtph263pdepay", "rtph263pdepay");
  h263p = gst_element_factory_make ("avdec_h263p", "h263p");
  sink = gst_element_factory_make ("autovideosink", "sink");

  /*
  g_object_set (source, "agent", magent, NULL);
  g_object_set (source, "stream", stream_id, NULL);
  g_object_set (source, "component", 1, NULL);
  */

  g_object_set (source, "address", "127.0.0.1", NULL);
  g_object_set (source, "port", 1234, NULL);

  g_object_set (source, "caps",
    gst_caps_from_string("application/x-rtp"), NULL);
  /*
  g_object_set (source, "caps", gst_caps_from_string(
    "application/x-rtp\,\ media\=\(string\)video\,\ "
    "clock-rate\=\(int\)90000\,\ "
    "encoding-name\=\(string\)H263-1998\,\ "
    "payload\=\(int\)96"),
    NULL);
  */

  //g_object_set (sink, "sync", FALSE, NULL);

  pipeline = gst_pipeline_new ("Video receive pipeline");

  if (!pipeline || !source || //!capsfilter ||
      !h263p || !rtph263pdepay || !sink)
  {
    g_printerr ("Not all elements could be created.\n");
    return;
  }

  // Build the pipeline
  gst_bin_add_many (GST_BIN (pipeline), source, //capsfilter,
            rtph263pdepay, h263p, sink, NULL);

  if (gst_element_link_many (source, //capsfilter,
                rtph263pdepay, h263p, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return;
  }

  // TODO: this is just output dump pipeline
  /*
  source = gst_element_factory_make ("nicesrc", "source");
  sink = gst_element_factory_make ("fakesink", "sink");

  g_object_set (source, "agent", magent, NULL);
  g_object_set (source, "stream", stream_id, NULL);
  g_object_set (source, "component", 1, NULL);
  g_object_set (sink, "dump", 1, NULL);


  pipeline = gst_pipeline_new ("Video send pipeline");
  if (!pipeline || !source || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return;
  }

  // Build the pipeline
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  if (gst_element_link (source, sink) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return;
  }
  */

  GST_INFO ("Pipeline created, registing on bus");

  bus = gst_element_get_bus (pipeline);
  gst_bus_enable_sync_message_emission (bus);
  gst_bus_add_signal_watch (bus);

  g_signal_connect (bus, "message::error",
      (GCallback) on_error, NULL);

  GST_INFO ("Registing pipeline on bus");

  data->pipeline = pipeline;
  ret = gst_element_set_state(data->pipeline, GST_STATE_PLAYING);

  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return;
  }
}
Example #12
static void
gst_nle_source_next (GstNleSource * nlesrc)
{
  GstNleSrcItem *item;
  GstStateChangeReturn ret;
  GstElement *uridecodebin;
  GstBus *bus;
  GstState state;

  nlesrc->index++;

  if (nlesrc->index >= g_list_length (nlesrc->queue)) {
    gst_nle_source_push_eos (nlesrc);
    return;
  }

  if (nlesrc->source != NULL) {
    gst_object_unref (nlesrc->source);
    nlesrc->source = NULL;
  }

  if (nlesrc->decoder != NULL) {
    gst_element_set_state (GST_ELEMENT (nlesrc->decoder), GST_STATE_NULL);
    gst_element_get_state (GST_ELEMENT (nlesrc->decoder), NULL, NULL, 0);
    gst_object_unref (nlesrc->decoder);
  }

  nlesrc->decoder = gst_pipeline_new ("decoder");
  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  /* Connect signal to recover source element for queries in bytes */
  g_signal_connect (uridecodebin, "source-setup",
      G_CALLBACK (gst_nle_source_on_source_setup), nlesrc); 

  gst_bin_add (GST_BIN (nlesrc->decoder), uridecodebin);

  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (lgm_filter_video_decoders), nlesrc);
  g_signal_connect (uridecodebin, "pad-added",
      G_CALLBACK (gst_nle_source_pad_added_cb), nlesrc);
  g_signal_connect (uridecodebin, "no-more-pads",
      G_CALLBACK (gst_nle_source_no_more_pads), nlesrc);

  bus = GST_ELEMENT_BUS (nlesrc->decoder);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
      nlesrc);
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  GST_INFO_OBJECT (nlesrc, "Starting next item with uri:%s", item->file_path);
  GST_INFO_OBJECT (nlesrc, "start:%" GST_TIME_FORMAT " stop:%"
      GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (item->start),
      GST_TIME_ARGS (item->stop), item->rate);

  g_object_set (uridecodebin, "uri", item->file_path, NULL);

  nlesrc->seek_done = FALSE;
  if (GST_CLOCK_TIME_IS_VALID (item->stop)) {
    nlesrc->video_seek_done = FALSE;
    nlesrc->audio_seek_done = FALSE;
  } else {
    nlesrc->video_seek_done = TRUE;
    nlesrc->audio_seek_done = TRUE;
  }
  nlesrc->audio_eos = TRUE;
  nlesrc->video_eos = TRUE;
  nlesrc->audio_ts = 0;
  nlesrc->video_ts = 0;
  nlesrc->start_ts = nlesrc->accu_time;
  nlesrc->video_linked = FALSE;
  nlesrc->audio_linked = FALSE;
  nlesrc->item_setup = FALSE;
  nlesrc->cached_duration = 0;

  GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (nlesrc->start_ts));
  gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
  ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 5 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
    gst_nle_source_check_eos (nlesrc);
    return;
  }

  nlesrc->seek_done = TRUE;
  if (!item->still_picture && GST_CLOCK_TIME_IS_VALID (item->stop)) {
    GST_DEBUG_OBJECT (nlesrc, "Sending seek event");
    gst_element_seek (nlesrc->decoder, 1, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, item->start, GST_SEEK_TYPE_SET, item->stop);
  }
}
gint
main (gint argc, gchar * argv[])
{
  PlayState state;
  GstElement *decoder;
  GstStateChangeReturn res;
  GstBus *bus;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_printerr ("Decode file from start to end.\n");
    g_printerr ("Usage: %s URI\n\n", argv[0]);
    return 1;
  }
  /* Start with zeroed-state */
  memset (&state, 0, sizeof (PlayState));

  state.loop = g_main_loop_new (NULL, TRUE);
  state.pipe = gst_pipeline_new ("pipeline");
  state.fwd_play = TRUE;
  g_mutex_init (&state.output_lock);

  bus = gst_pipeline_get_bus (GST_PIPELINE (state.pipe));
  gst_bus_add_signal_watch (bus);

  g_signal_connect (bus, "message::eos", G_CALLBACK (eos_cb), &state);
  g_signal_connect (bus, "message::error", G_CALLBACK (error_cb), &state);
  g_signal_connect (bus, "message::warning", G_CALLBACK (warning_cb), NULL);
  g_signal_connect (bus, "message::state-changed", G_CALLBACK (state_cb),
      &state);

#if 0
  g_signal_connect (state.pipe, "deep-notify",
      G_CALLBACK (gst_object_default_deep_notify), NULL);
#endif

  decoder = gst_element_factory_make ("uridecodebin", "decoder");
  g_assert (decoder);
  gst_bin_add (GST_BIN (state.pipe), decoder);

  if (argv[1] && strstr (argv[1], "://") != NULL) {
    g_object_set (G_OBJECT (decoder), "uri", argv[1], NULL);
  } else if (argv[1]) {
    gchar *uri = g_strdup_printf ("file://%s", argv[1]);
    g_object_set (G_OBJECT (decoder), "uri", uri, NULL);
    g_free (uri);
  } else {
    g_print ("Usage: %s <filename|uri>\n", argv[0]);
    return -1;
  }

  g_signal_connect (decoder, "pad-added", G_CALLBACK (pad_added_cb), &state);

  res = gst_element_set_state (state.pipe, GST_STATE_PLAYING);
  if (res == GST_STATE_CHANGE_FAILURE) {
    g_print ("could not play\n");
    return -1;
  }

  g_main_loop_run (state.loop);

  /* tidy up */
  gst_element_set_state (state.pipe, GST_STATE_NULL);
  gst_object_unref (state.pipe);
  gst_object_unref (bus);

  return 0;
}
Example #14
GstElement *
purple_media_manager_get_pipeline(PurpleMediaManager *manager)
{
#ifdef USE_VV
	g_return_val_if_fail(PURPLE_IS_MEDIA_MANAGER(manager), NULL);

	if (manager->priv->pipeline == NULL) {
		FsElementAddedNotifier *notifier;
		gchar *filename;
		GError *err = NULL;
		GKeyFile *keyfile;
		GstBus *bus;
		manager->priv->pipeline = gst_pipeline_new(NULL);

		bus = gst_pipeline_get_bus(
				GST_PIPELINE(manager->priv->pipeline));
		gst_bus_add_signal_watch(GST_BUS(bus));
		g_signal_connect(G_OBJECT(bus), "message",
				G_CALLBACK(pipeline_bus_call), manager);
		gst_bus_set_sync_handler(bus,
				gst_bus_sync_signal_handler, NULL);
		gst_object_unref(bus);

		filename = g_build_filename(purple_user_dir(),
				"fs-element.conf", NULL);
		keyfile = g_key_file_new();
		if (!g_key_file_load_from_file(keyfile, filename,
				G_KEY_FILE_NONE, &err)) {
			if (err->code == 4)
				purple_debug_info("mediamanager",
						"Couldn't read "
						"fs-element.conf: %s\n",
						err->message);
			else
				purple_debug_error("mediamanager",
						"Error reading "
						"fs-element.conf: %s\n",
						err->message);
			g_error_free(err);
		}
		g_free(filename);

		/* Hack to make alsasrc stop messing up audio timestamps */
		if (!g_key_file_has_key(keyfile,
				"alsasrc", "slave-method", NULL)) {
			g_key_file_set_integer(keyfile,
					"alsasrc", "slave-method", 2);
		}

		notifier = fs_element_added_notifier_new();
		fs_element_added_notifier_add(notifier,
				GST_BIN(manager->priv->pipeline));
		fs_element_added_notifier_set_properties_from_keyfile(
				notifier, keyfile);

		gst_element_set_state(manager->priv->pipeline,
				GST_STATE_PLAYING);
	}

	return manager->priv->pipeline;
#else
	return NULL;
#endif
}
gint main (gint argc, gchar *argv[])
{
    gtk_init (&argc, &argv);
    gst_init (&argc, &argv);

    GstElement* pipeline = gst_pipeline_new ("pipeline");

    //window to control the states
    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    GdkGeometry geometry;
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);
    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
    gtk_window_move (GTK_WINDOW (window_control), 10, 10);
    GtkWidget* table = gtk_grid_new ();
    gtk_container_add (GTK_CONTAINER (window_control), table);

    //control state null
    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
    g_signal_connect (G_OBJECT (button_state_null), "clicked",
        G_CALLBACK (button_state_null_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_null, 0, 0, 1, 1);
    gtk_widget_show (button_state_null);

    //control state ready
    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
    g_signal_connect (G_OBJECT (button_state_ready), "clicked",
        G_CALLBACK (button_state_ready_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_ready, 0, 1, 1, 1);
    gtk_widget_show (button_state_ready);

    //control state paused
    GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
    g_signal_connect (G_OBJECT (button_state_paused), "clicked",
        G_CALLBACK (button_state_paused_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_paused, 0, 2, 1, 1);
    gtk_widget_show (button_state_paused);

    //control state playing
    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
    g_signal_connect (G_OBJECT (button_state_playing), "clicked",
        G_CALLBACK (button_state_playing_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_playing, 0, 3, 1, 1);
    gtk_widget_show (button_state_playing);

    //change framerate
    GtkWidget* slider_fps = gtk_scale_new_with_range (GTK_ORIENTATION_VERTICAL,
        1, 30, 2);
    g_signal_connect (G_OBJECT (slider_fps), "format-value",
        G_CALLBACK (slider_fps_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), slider_fps, 1, 0, 1, 3);
    gtk_widget_show (slider_fps);

    gtk_widget_show (table);
    gtk_widget_show (window_control);

    GstElement* videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
    GstElement* upload = gst_element_factory_make ("glupload", "glupload");
    GstElement* glfiltercube = gst_element_factory_make ("glfiltercube", "glfiltercube");
    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");

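    //force the test source to produce 640x480 RGBA at 25 fps on its way into glupload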
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                        "width", G_TYPE_INT, 640,
                                        "height", G_TYPE_INT, 480,
                                        "framerate", GST_TYPE_FRACTION, 25, 1,
                                        "format", G_TYPE_STRING, "RGBA",
                                        NULL) ;

    gst_bin_add_many (GST_BIN (pipeline), videosrc, upload, glfiltercube, videosink, NULL);

    gboolean link_ok = gst_element_link_filtered(videosrc, upload, caps);
    gst_caps_unref(caps);
    if(!link_ok)
    {
        g_warning("Failed to link videosrc to glupload!\n");
        return -1;
    }

    if(!gst_element_link_many(upload, glfiltercube, videosink, NULL))
    {
        g_warning("Failed to link glupload, glfiltercube and videosink!\n");
        return -1;
    }

    //set window id on this event
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_signal_watch (bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);
    gst_object_unref (bus);

    //start
    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print ("Failed to start up pipeline!\n");
        return -1;
    }

    gtk_main();

    return 0;
}
Ejemplo n.º 16
0
/**
 * Constructor which creates the GStreamer pipeline.
 * This pipeline grabs the video and renders it with OpenGL.
 */
Pipeline::Pipeline(Application* owner) :
        owner_(owner), 
        record_all_frames_enabled_(false)
{
    Configuration *config = owner_->get_configuration();
    bool verbose = config->get_verbose();
    set_intervalometer_is_on(false);
    pipeline_ = NULL;
    pipeline_ = GST_PIPELINE(gst_pipeline_new("pipeline"));

    GstElement* dv_demux0 = NULL;
    GstElement* hdv_decoder0 = NULL;
    GstElement* dv_videoscale0 = NULL;
    GstElement* dv_ffmpegcolorspace = NULL;
    GstElement* dvdec = NULL;
    GstElement* dv_queue0 = NULL;
    //GstElement* dv_queue1 = NULL;
    // capsfilter0, for the capture FPS and size
    GstElement* capsfilter0 = gst_element_factory_make ("capsfilter", NULL);

    bool is_dv_enabled = config->videoSource() == "dv";
    bool is_hdv_enabled = config->videoSource() == "hdv";
    // Video source element
    // TODO: add more input types like in Ekiga
    if (verbose)
        std::cout << "Video source: " << config->videoSource() << std::endl;
    if (config->videoSource() == "test")
    {
        videosrc_  = gst_element_factory_make("videotestsrc", "videosrc0");
    } 
    else if (config->videoSource() == "x") 
    {
        videosrc_  = gst_element_factory_make("ximagesrc", "videosrc0");
    } 
    else if (config->videoSource() == "dv") 
    {
        if (! Raw1394::cameraIsReady())
            g_error("There is no DV camera that is ready.");
        videosrc_  = gst_element_factory_make("dv1394src", "videosrc0");
        dv_demux0 = gst_element_factory_make("dvdemux", "dv_demux0");
        dv_queue0  = gst_element_factory_make("queue", "dv_queue0");
        dvdec = gst_element_factory_make("dvdec", "dvdec");
        dv_videoscale0 = gst_element_factory_make("videoscale", "dv_videoscale0");
        dv_ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace", "dv_ffmpegcolorspace");
        //dv_queue1  = gst_element_factory_make("queue", "dv_queue1");
        // Register a connection callback for the dvdemux element.
        // The demuxer is linked to whatever follows it dynamically, because a
        // DV stream may carry several elementary streams (for example audio
        // and video). The source pad(s) are created at run time by the
        // demuxer once it detects the number and nature of the streams, so we
        // connect a callback that is executed whenever "pad-added" is emitted
        // (a hypothetical sketch of such a handler is shown after this constructor).
        g_signal_connect(dv_demux0, "pad-added",
            G_CALLBACK(cb_new_dvdemux_src_pad),
            static_cast<gpointer>(dv_queue0));
        //g_assert(dv_demux0);
    } 
    else if (config->videoSource() == "hdv") 
    {
        videosrc_  = gst_element_factory_make("hdv1394src", "videosrc0");
        hdv_decoder0 = gst_element_factory_make("decodebin", "hdv_decoder0");
        g_assert(hdv_decoder0);
    } 
    else  // v4l2src
    {
        // TODO:2010-08-06:aalex:We could rely on gstreamer-properties to configure the video source.
        // Add -d gconf (gconfvideosrc)
        std::string device_name(config->videoSource());
        if (verbose)
        {
            std::cout << "Video source: v4l2src with camera " << device_name << std::endl;
        }
        videosrc_  = gst_element_factory_make("v4l2src", "videosrc0"); 
        g_object_set(videosrc_, "device", device_name.c_str(), NULL); 
    }
    // TODO: use something else than g_assert to see if we could create the elements.
    g_assert(videosrc_); 

    // ffmpegcolorspace0 element
    GstElement* ffmpegcolorspace0 = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace0");
    g_assert(ffmpegcolorspace0);
    GstElement* tee0 = gst_element_factory_make("tee", "tee0");
    g_assert(tee0);
    GstElement* queue0 = gst_element_factory_make("queue", "queue0");
    g_assert(queue0);

    videosink_ = gst_element_factory_make ("cluttersink", NULL);
    g_object_set (videosink_, "texture", CLUTTER_TEXTURE(owner_->get_gui()->get_live_input_texture()), NULL);

    // TODO: Make sure the rendering FPS is constant, and not subordinate to
    // the FPS of the camera.

    // GdkPixbuf sink:
    GstElement* queue1 = gst_element_factory_make("queue", "queue1");
    g_assert(queue1);
    gdkpixbufsink_ = gst_element_factory_make("gdkpixbufsink", "gdkpixbufsink0");
    g_assert(gdkpixbufsink_);

    bool is_preview_enabled = config->get_preview_window_enabled();
    GstElement *queue2 = NULL;
    GstElement *ffmpegcolorspace1 = NULL;
    GstElement *xvimagesink = NULL;
    if (is_preview_enabled)
    {
        queue2 = gst_element_factory_make("queue", "queue2");
        g_assert(queue2);
        ffmpegcolorspace1 = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace1");
        g_assert(ffmpegcolorspace1);
        xvimagesink = gst_element_factory_make("xvimagesink", "xvimagesink0");
        g_assert(xvimagesink);
        g_object_set(xvimagesink, "force-aspect-ratio", TRUE, NULL);
    }

    // add elements
    gst_bin_add(GST_BIN(pipeline_), videosrc_); // capture
    gst_bin_add(GST_BIN(pipeline_), capsfilter0);
    if (is_dv_enabled)
    {
        gst_bin_add(GST_BIN(pipeline_), dv_demux0);
        gst_bin_add(GST_BIN(pipeline_), dv_queue0);
        gst_bin_add(GST_BIN(pipeline_), dvdec);
        gst_bin_add(GST_BIN(pipeline_), dv_ffmpegcolorspace);
        gst_bin_add(GST_BIN(pipeline_), dv_videoscale0);
        //gst_bin_add(GST_BIN(pipeline_), dv_queue1);
        // Set the capsfilter caps for DV:
    } 
    else if (is_hdv_enabled)
    {
        gst_bin_add(GST_BIN(pipeline_), hdv_decoder0);
    }
    gst_bin_add(GST_BIN(pipeline_), ffmpegcolorspace0);
    gst_bin_add(GST_BIN(pipeline_), tee0);
    gst_bin_add(GST_BIN(pipeline_), queue0); // branch #0: videosink
    //gst_bin_add(GST_BIN(pipeline_), capsfilter1);
    gst_bin_add(GST_BIN(pipeline_), videosink_);
    gst_bin_add(GST_BIN(pipeline_), queue1); // branch #1: gdkpixbufsink
    gst_bin_add(GST_BIN(pipeline_), gdkpixbufsink_);
    if (is_preview_enabled)
    {
        gst_bin_add(GST_BIN(pipeline_), queue2); // branch #2: xvimagesink
        gst_bin_add(GST_BIN(pipeline_), ffmpegcolorspace1);
        gst_bin_add(GST_BIN(pipeline_), xvimagesink);
    }

    // link pads:
    gboolean is_linked = FALSE; 
    if (config->videoSource() == std::string("test") || config->videoSource() == std::string("x")) 
    {
        if (config->videoSource() == std::string("x")) 
        {
            g_object_set(G_OBJECT(videosrc_), "endx", config->get_capture_width(), NULL);
            g_object_set(G_OBJECT(videosrc_), "endy", config->get_capture_height(), NULL);
            GstCaps *the_caps = gst_caps_from_string("video/x-raw-rgb, framerate=30/1");
            g_object_set(capsfilter0, "caps", the_caps, NULL);
            gst_caps_unref(the_caps);
            
        }
        else
        {   // it's a videotestsrc
            std::string caps_str = "video/x-raw-yuv, width=" + boost::lexical_cast<std::string>(config->get_capture_width()) + ", height=" + boost::lexical_cast<std::string>(config->get_capture_height()) + ", framerate=30/1";
            std::cout << "CAPS: " << caps_str << std::endl;
            GstCaps *the_caps = gst_caps_from_string(caps_str.c_str());
            g_object_set(capsfilter0, "caps", the_caps, NULL);
            gst_caps_unref(the_caps);
        }

        link_or_die(videosrc_, capsfilter0);
    } 
    else if (is_dv_enabled || is_hdv_enabled) 
    {
        if (is_dv_enabled)
        {
            link_or_die(videosrc_, dv_demux0);
            // dv_demux0 is linked to dvdec when its src pads appear
            link_or_die(dv_queue0, dvdec);
            link_or_die(dvdec, dv_ffmpegcolorspace);
            link_or_die(dv_ffmpegcolorspace, dv_videoscale0);
            link_or_die(dv_videoscale0, capsfilter0);
        } 
        else // hdv
        {
            g_error("HDV is not yet implemented."); // quits
        }
    } 
    else // it's a v4l2src
    {     
        bool source_is_linked = false;
        int frame_rate_index = 0;
        // Guess the right FPS to use with the video capture device
        while (not source_is_linked)
        {
            GstCaps *videocaps = gst_caps_from_string(guess_source_caps(frame_rate_index).c_str());
            g_object_set(capsfilter0, "caps", videocaps, NULL);
            gst_caps_unref(videocaps);
            is_linked = gst_element_link(videosrc_, capsfilter0);
            if (!is_linked) 
            { 
                std::cout << "Failed to link video source. Trying another framerate." << std::endl;
                ++frame_rate_index;
            }
            else 
            {
                if (verbose)
                    std::cout << "Success." << std::endl;
                source_is_linked = true;
            }
        }
    }
    //Will now link capsfilter0--ffmpegcolorspace0--tee0.
    link_or_die(capsfilter0, ffmpegcolorspace0);
    link_or_die(ffmpegcolorspace0, tee0);
    //Will now link tee--queue--videosink.
    is_linked = gst_element_link_pads(tee0, "src0", queue0, "sink");
    if (!is_linked) 
    {
        g_print("Could not link %s to %s.\n", "tee0", "sink"); 
        exit(1);
    }
    // output 0: the OpenGL uploader.
    link_or_die(queue0, videosink_);

    // output 1: the GdkPixbuf sink
    //Will now link tee--queue--pixbufsink.
    is_linked = gst_element_link_pads(tee0, "src1", queue1, "sink");
    if (!is_linked) 
    { 
        g_print("Could not link %s to %s.\n", "tee0", "queue1"); 
        exit(1); 
    }
    link_or_die(queue1, gdkpixbufsink_);

    if (is_preview_enabled)
    {
        
        is_linked = gst_element_link_pads(tee0, "src2", queue2, "sink");
        if (!is_linked) 
        { 
            g_print("Could not link %s to %s.\n", "tee0", "queue2"); 
            exit(1); 
        }
       
        is_linked = gst_element_link(queue2, ffmpegcolorspace1);
        if (!is_linked) 
        { 
            g_print("Could not link %s to %s.\n", "queue2", "ffmpegcolorspace1"); 
            exit(1); 
        }
        is_linked = gst_element_link(ffmpegcolorspace1, xvimagesink);
        if (!is_linked) 
        { 
            g_print("Could not link %s to %s.\n", "ffmpegcolorspace1", "xvimagesink0"); 
            exit(1); 
        }
    }

    if (verbose)
        std::cout << "Will now setup the pipeline bus." << std::endl;
    /* setup bus */
    GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), this);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), this);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), this);
    g_signal_connect(bus, "message", G_CALLBACK(bus_message_cb), this);
    // Keep the bus referenced here; it is polled again below if starting the pipeline fails.

    /* run */
    GstStateChangeReturn ret;
    if (verbose)
        std::cout << "Set pipeline to READY" << std::endl;
    ret = gst_element_set_state(GST_ELEMENT(pipeline_), GST_STATE_READY);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print("Failed to make the video pipeline ready!\n");
    }
    if (verbose)
        std::cout << "Set pipeline to PLAYING" << std::endl;
    ret = gst_element_set_state(GST_ELEMENT(pipeline_), GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print("Failed to start the video pipeline!\n");
        g_print("-----------------------------------\n");
        /* check if there is an error message with details on the bus */
        GstMessage* msg = gst_bus_poll(bus, GST_MESSAGE_ERROR, 0);
        if (msg)
        {
          GError *err = NULL;
          gst_message_parse_error(msg, &err, NULL);
          g_print("ERROR: %s\n", err->message);
          g_error_free(err);
          gst_message_unref(msg);
        }
        g_print("-----------------------------------\n");
        exit(1);
        //FIXME: causes a segfault: context->owner_->quit();
    }
    gst_object_unref(bus);
    if (verbose)
        std::cout << "Successfully started the pipeline." << std::endl;
}
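// Note: the dvdemux "pad-added" handler referenced in the constructor above is
// not part of this listing. The following is only a hypothetical sketch; the
// callback name and the queue passed as user data come from the code above,
// everything else is an assumption.
static void
cb_new_dvdemux_src_pad(GstElement *demux, GstPad *src_pad, gpointer user_data)
{
    GstElement *queue = GST_ELEMENT(user_data);
    GstPad *sink_pad = NULL;

    // dvdemux also exposes an audio pad; only link the video one.
    if (! g_str_has_prefix(GST_PAD_NAME(src_pad), "video"))
        return;

    sink_pad = gst_element_get_static_pad(queue, "sink");
    if (! gst_pad_is_linked(sink_pad))
        gst_pad_link(src_pad, sink_pad);
    gst_object_unref(sink_pad);
}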
Ejemplo n.º 17
0
gint
main (gint argc, gchar ** argv)
{
  GstElement *pipeline;
  GstElement *shapewipe;
  GstControlSource *cs;
  GMainLoop *loop;
  GstBus *bus;
  gchar *pipeline_string;
  gfloat border = 0.05;

  if (argc < 2) {
    g_print ("Usage: shapewipe mask.png <border>\n");
    return -1;
  }

  gst_init (&argc, &argv);

  if (argc > 2) {
    border = atof (argv[2]);
  }

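  /* Three branches feed the pipeline: the main videotestsrc runs through
   * shapewipe into videomixer, the PNG given on the command line is decoded
   * and scaled into shapewipe's mask_sink pad, and a second videotestsrc
   * (snow pattern) provides the mixer's other input. */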
  pipeline_string =
      g_strdup_printf
      ("videotestsrc ! video/x-raw,format=(string)AYUV,width=640,height=480 ! shapewipe name=shape border=%f ! videomixer name=mixer ! videoconvert ! autovideosink     filesrc location=%s ! typefind ! decodebin2 ! videoconvert ! videoscale ! queue ! shape.mask_sink    videotestsrc pattern=snow ! video/x-raw,format=(string)AYUV,width=640,height=480 ! queue ! mixer.",
      border, argv[1]);

  pipeline = gst_parse_launch (pipeline_string, NULL);
  g_free (pipeline_string);

  if (pipeline == NULL) {
    g_print ("Failed to create pipeline\n");
    return -2;
  }

  shapewipe = gst_bin_get_by_name (GST_BIN (pipeline), "shape");

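  /* Drive the "position" property with a low-frequency oscillator: an
   * amplitude of 0.5 around an offset of 0.5 sweeps the wipe through the
   * full 0.0-1.0 range at 0.25 Hz. */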
  cs = gst_lfo_control_source_new ();

  gst_object_add_control_binding (GST_OBJECT_CAST (shapewipe),
      gst_direct_control_binding_new (GST_OBJECT_CAST (shapewipe), "position",
          cs));

  g_object_set (cs,
      "amplitude", 0.5,
      "offset", 0.5, "frequency", 0.25, "timeshift", 500 * GST_MSECOND, NULL);

  g_object_unref (cs);

  loop = g_main_loop_new (NULL, FALSE);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (on_message), loop);
  gst_object_unref (GST_OBJECT (bus));

  if (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
    return -4;
  }

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);

  gst_object_unref (G_OBJECT (pipeline));

  return 0;
}
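/* Note: the on_message() bus callback used above is not included in this
 * listing. This is a minimal, hypothetical sketch that only reports errors
 * and quits the main loop on ERROR or EOS. */
static void
on_message (GstBus * bus, GstMessage * message, gpointer user_data)
{
  GMainLoop *loop = user_data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *debug = NULL;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
}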
Ejemplo n.º 18
0
static GtkWidget *
get_voice_test_frame(PurplePlugin *plugin)
{
	GtkWidget *vbox = gtk_vbox_new(FALSE, PIDGIN_HIG_BORDER);
	GtkWidget *level = gtk_progress_bar_new();
	GtkWidget *volume = gtk_hscale_new_with_range(0, 100, 1);
	GtkWidget *threshold = gtk_hscale_new_with_range(0, 100, 1);
	GtkWidget *label;
	GtkTable *table = GTK_TABLE(gtk_table_new(2, 2, FALSE));

	GstElement *pipeline;
	GstBus *bus;
	BusCbCtx *ctx;

	g_object_set(vbox, "width-request", 500, NULL);

	gtk_table_set_row_spacings(table, PIDGIN_HIG_BOX_SPACE);
	gtk_table_set_col_spacings(table, PIDGIN_HIG_BOX_SPACE);

	label = gtk_label_new(_("Volume:"));
	g_object_set(label, "xalign", 0.0, NULL);
	gtk_table_attach(table, label, 0, 1, 0, 1, GTK_FILL, 0, 0, 0);
	gtk_table_attach_defaults(table, volume, 1, 2, 0, 1);
	label = gtk_label_new(_("Silence threshold:"));
	g_object_set(label, "xalign", 0.0, "yalign", 1.0, NULL);
	gtk_table_attach(table, label, 0, 1, 1, 2, GTK_FILL, GTK_FILL, 0, 0);
	gtk_table_attach_defaults(table, threshold, 1, 2, 1, 2);

	gtk_container_add(GTK_CONTAINER(vbox), level);
	gtk_container_add(GTK_CONTAINER(vbox), GTK_WIDGET(table));
	gtk_widget_show_all(vbox);

	pipeline = create_pipeline();
	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_signal_watch(bus);
	ctx = g_new(BusCbCtx, 1);
	ctx->level = GTK_PROGRESS_BAR(level);
	ctx->threshold = GTK_RANGE(threshold);
	g_signal_connect_data(bus, "message", G_CALLBACK(gst_bus_cb),
			ctx, (GClosureNotify)g_free, 0);
	gst_object_unref(bus);

	g_signal_connect(volume, "value-changed",
			(GCallback)on_volume_change_cb, pipeline);

	gtk_range_set_value(GTK_RANGE(volume),
			purple_prefs_get_int("/purple/media/audio/volume/input"));
	gtk_widget_set(volume, "draw-value", FALSE, NULL);

	gtk_range_set_value(GTK_RANGE(threshold),
			purple_prefs_get_int("/purple/media/audio/silence_threshold"));

	g_signal_connect(vbox, "destroy",
			G_CALLBACK(voice_test_frame_destroy_cb), pipeline);
	g_signal_connect(volume, "destroy",
			G_CALLBACK(volume_scale_destroy_cb), NULL);
	g_signal_connect(threshold, "format-value",
			G_CALLBACK(threshold_value_format_cb), NULL);
	g_signal_connect(threshold, "destroy",
			G_CALLBACK(threshold_scale_destroy_cb), NULL);

	return vbox;
}
Ejemplo n.º 19
0
GST_END_TEST
GST_START_TEST (connect_chain_of_elements)
{
  gchar *padname = NULL;
  KmsConnectData *data1, *data2;
  gchar *filter_factory;
  GstBus *bus;

  loop = g_main_loop_new (NULL, TRUE);
  pipeline = gst_pipeline_new (__FUNCTION__);
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_msg), pipeline);

  data1 = kms_connect_data_create (0);
  data2 = kms_connect_data_create (MAX_CHECKS);
  data2->data_probe = (KmsProbeType) data_probe_cb;
  data2->audio_probe = (KmsProbeType) audio_probe_cb;
  data2->video_probe = (KmsProbeType) video_probe_cb;

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  data1->src = gst_element_factory_make ("dummysrc", NULL);
  data1->sink = gst_element_factory_make ("filterelement", NULL);

  data2->src = data1->sink;
  data2->sink = gst_element_factory_make ("dummysink", NULL);

  g_signal_connect (data1->src, "pad-added", G_CALLBACK (src_pads_added),
      data1);
  g_signal_connect (data1->sink, "pad-added", G_CALLBACK (sink_pads_added),
      data1);

  g_signal_connect (data2->src, "pad-added", G_CALLBACK (src_pads_added),
      data2);
  g_signal_connect (data2->sink, "pad-added", G_CALLBACK (sink_pads_added),
      data2);
  g_signal_connect (data2->sink, "pad-removed", G_CALLBACK (sink_pads_removed),
      data2);

  gst_bin_add_many (GST_BIN (pipeline), data1->src, data1->sink, data2->sink,
      NULL);

  /*******************************/
  /* Connect dummysrc to filter  */
  /*******************************/

  /* request src pad using action */
  g_signal_emit_by_name (data1->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_VIDEO, NULL, GST_PAD_SRC, &data1->video_src);
  fail_if (data1->video_src == NULL);
  g_signal_emit_by_name (data1->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &data1->audio_src);
  fail_if (data1->audio_src == NULL);
  g_signal_emit_by_name (data1->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_DATA, NULL, GST_PAD_SRC, &data1->data_src);
  fail_if (data1->data_src == NULL);

  GST_DEBUG ("Video pad name %s", data1->video_src);
  GST_DEBUG ("Audio pad name %s", data1->audio_src);
  GST_DEBUG ("Data pad name %s", data1->data_src);

  filter_factory = "videoflip";
  GST_DEBUG ("Setting property uri to : %s", filter_factory);
  g_object_set (G_OBJECT (data1->sink), "filter_factory", filter_factory, NULL);

  g_object_set (G_OBJECT (data1->src), "video", TRUE, "audio", TRUE, "data",
      TRUE, NULL);

  /*******************************/
  /* Connect filter to dummysink */
  /*******************************/

  /* request src pad using action */
  g_signal_emit_by_name (data2->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_VIDEO, NULL, GST_PAD_SRC, &data2->video_src);
  fail_if (data2->video_src == NULL);
  g_signal_emit_by_name (data2->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_AUDIO, NULL, GST_PAD_SRC, &data2->audio_src);
  fail_if (data2->audio_src == NULL);
  g_signal_emit_by_name (data2->src, "request-new-pad",
      KMS_ELEMENT_PAD_TYPE_DATA, NULL, GST_PAD_SRC, &data2->data_src);
  fail_if (data2->data_src == NULL);

  GST_DEBUG ("Video pad name %s", data2->video_src);
  GST_DEBUG ("Audio pad name %s", data2->audio_src);
  GST_DEBUG ("Data pad name %s", data2->data_src);

  g_object_set (G_OBJECT (data2->sink), "video", TRUE, "audio", TRUE, "data",
      TRUE, NULL);

  g_timeout_add_seconds (4, print_timedout_pipeline, NULL);

  gst_element_sync_state_with_parent (data1->src);
  gst_element_sync_state_with_parent (data1->sink);
  gst_element_sync_state_with_parent (data2->sink);

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_signal_watch (bus);
  g_object_unref (bus);
  g_object_unref (pipeline);
  g_free (padname);
  g_main_loop_unref (loop);
  kms_connect_data_destroy (data1);
  kms_connect_data_destroy (data2);
}
gint
main (gint argc, gchar * argv[])
{
  LocalState state;
  GtkWidget *area, *combo, *w;
  const gchar *uri;

  XInitThreads ();

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  if (argc < 2) {
    g_print ("Usage: 3dvideo <uri-to-play>\n");
    return 1;
  }

  uri = argv[1];

  GstElement *pipeline = gst_element_factory_make ("playbin", NULL);
  GstBin *sinkbin = (GstBin *) gst_parse_bin_from_description ("glupload ! glcolorconvert ! glviewconvert name=viewconvert ! glimagesink name=sink", TRUE, NULL);
#if USE_GLCONVERT_FOR_INPUT
  GstElement *glconvert = gst_bin_get_by_name (sinkbin, "viewconvert");
#endif
  GstElement *videosink = gst_bin_get_by_name (sinkbin, "sink");

  g_return_val_if_fail (pipeline != NULL, 1);
  g_return_val_if_fail (videosink != NULL, 1);

  /* Get defaults */
  g_object_get (pipeline, "video-multiview-mode", &state.in_mode,
      "video-multiview-flags", &state.in_flags, NULL);
  gst_child_proxy_get (GST_CHILD_PROXY (videosink), "sink::output-multiview-mode", &state.out_mode,
      "sink::output-multiview-flags", &state.out_flags, NULL);

  detect_mode_from_uri (&state, uri);

  g_object_set (G_OBJECT (pipeline), "video-sink", sinkbin, NULL);
  g_object_set (G_OBJECT (pipeline), "uri", uri, NULL);

#if USE_GLCONVERT_FOR_INPUT
  g_object_set (G_OBJECT (glconvert), "input-mode-override", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (glconvert), "input-flags-override", state.in_flags,
      NULL);
#else
  g_object_set (G_OBJECT (pipeline), "video-multiview-mode", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (pipeline), "video-multiview-flags", state.in_flags,
      NULL);
#endif

  /* Connect to bus for signal handling */
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline);

  gst_element_set_state (pipeline, GST_STATE_READY);

  area = gtk_drawing_area_new ();
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area, NULL);
  gst_object_unref (bus);

  /* Toplevel window */
  GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 800, 600);
  gtk_window_set_title (GTK_WINDOW (window), "Stereoscopic video demo");
  GdkGeometry geometry;
  geometry.min_width = 1;
  geometry.min_height = 1;
  geometry.max_width = -1;
  geometry.max_height = -1;
  gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry,
      GDK_HINT_MIN_SIZE);

  GtkWidget *vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2);
  gtk_container_add (GTK_CONTAINER (window), vbox);

  /* area where the video is drawn */
  gtk_box_pack_start (GTK_BOX (vbox), area, TRUE, TRUE, 0);

  /* Buttons to control the pipeline state */
  GtkWidget *table = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (vbox), table);

  GtkWidget *button_state_ready = gtk_button_new_with_label ("Stop");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_ready, 1, 0, 1, 1);
  gtk_widget_show (button_state_ready);

  //control state paused
  GtkWidget *button_state_paused = gtk_button_new_with_label ("Pause");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_paused, 2, 0, 1, 1);
  gtk_widget_show (button_state_paused);

  //control state playing
  GtkWidget *button_state_playing = gtk_button_new_with_label ("Play");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_playing, 3, 0, 1, 1);
  //gtk_widget_show (button_state_playing);

  w = gst_mview_widget_new (FALSE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING,
          state.in_mode));
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), glconvert);
#else
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), pipeline);
#endif

  g_object_set (G_OBJECT (w), "flags", state.in_flags, NULL);
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), glconvert);
#else
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), pipeline);
#endif
  gtk_container_add (GTK_CONTAINER (vbox), w);

  w = gst_mview_widget_new (TRUE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_MODE, state.out_mode));
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_output_mode), videosink);

  g_object_set (G_OBJECT (w), "flags", state.out_flags, NULL);
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (output_flags_changed), videosink);
  g_signal_connect (G_OBJECT (w), "notify::downmix-mode",
      G_CALLBACK (downmix_method_changed), videosink);
  gtk_container_add (GTK_CONTAINER (vbox), w);

  //configure the pipeline
  g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb),
      pipeline);

  gtk_widget_realize (area);

  /* Redraw needed when paused or stopped (PAUSED or READY) */
  g_signal_connect (area, "draw", G_CALLBACK (draw_cb), videosink);
  g_signal_connect(area, "configure-event", G_CALLBACK(resize_cb), videosink);

  gtk_widget_show_all (window);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  gtk_main ();

  return 0;
}
Ejemplo n.º 21
0
int main(int argc, char *argv[]) {
	CustomData data;
	GstStateChangeReturn ret;
	GstBus *bus;

	/* Initialize GTK */
	gtk_init (&argc, &argv);

	/* Initialize GStreamer */
	gst_init (&argc, &argv);

	/* Initialize our data structure */
	memset (&data, 0, sizeof (data));
	data.duration = GST_CLOCK_TIME_NONE;

	/* Create the elements */
	data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");

	if (!data.playbin2) {
		g_printerr ("Not all elements could be created.\n");
		return -1;
	}

	/* Set the URI to play */
	g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", NULL);

	/* Connect to interesting signals in playbin2 */
	g_signal_connect (G_OBJECT (data.playbin2), "video-tags-changed", (GCallback) tags_cb, &data);
	g_signal_connect (G_OBJECT (data.playbin2), "audio-tags-changed", (GCallback) tags_cb, &data);
	g_signal_connect (G_OBJECT (data.playbin2), "text-tags-changed", (GCallback) tags_cb, &data);

	/* Create the GUI */
	create_ui (&data);

	/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
	bus = gst_element_get_bus (data.playbin2);
	gst_bus_add_signal_watch (bus);
	g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
	g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
	gst_object_unref (bus);

	/* Start playing */
	ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr ("Unable to set the pipeline to the playing state.\n");
		gst_object_unref (data.playbin2);
		return -1;
	}

	/* Register a function that GLib will call every second */
	g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);

	/* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
	gtk_main ();

	/* Free resources */
	gst_element_set_state (data.playbin2, GST_STATE_NULL);
	gst_object_unref (data.playbin2);
	return 0;
}
Ejemplo n.º 22
0
gboolean
ges_generate_test_file_audio_video (const gchar * filedest,
    const gchar * audio_enc,
    const gchar * video_enc,
    const gchar * mux, const gchar * video_pattern, const gchar * audio_wave)
{
  GError *error = NULL;
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *message;
  gchar *pipeline_str;
  gboolean done = FALSE;
  gboolean ret = FALSE;

  if (g_file_test (filedest, G_FILE_TEST_EXISTS)) {
    GST_INFO ("The file %s already existed.", filedest);
    return TRUE;
  }

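  /* Builds "audiotestsrc [! audio_enc] ! mux name=m ! filesink" plus a second
   * "videotestsrc ! video_enc ! m." branch feeding the same muxer. */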
  pipeline_str = g_strdup_printf ("audiotestsrc num-buffers=430 wave=%s "
      "%c %s ! %s name=m ! filesink location= %s/%s "
      "videotestsrc pattern=%s num-buffers=300 ! %s ! m.",
      audio_wave,
      audio_enc ? '!' : ' ',
      audio_enc ? audio_enc : "",
      mux, g_get_current_dir (), filedest, video_pattern, video_enc);

  pipeline = gst_parse_launch (pipeline_str, &error);

  if (pipeline == NULL)
    return FALSE;

  g_free (pipeline_str);

  bus = gst_element_get_bus (GST_ELEMENT (pipeline));
  gst_bus_add_signal_watch (bus);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  while (!done) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_CLOCK_TIME_NONE);
    if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_EOS) {
      done = TRUE;
      ret = TRUE;
    } else if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
      gchar *debug = NULL;
      GError *err = NULL;

      gst_message_parse_error (message, &err, &debug);
      done = TRUE;
      ret = FALSE;
      GST_ERROR ("Got error %s from %s fron the bus while generation: %s"
          "debug infos: %s", GST_OBJECT_NAME (message->src), err->message,
          debug ? debug : "none", filedest);
      g_clear_error (&err);
      g_free (debug);
    }
  }

  gst_bus_remove_signal_watch (bus);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return ret;
}
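/* Hypothetical usage of the helper above (the encoder and muxer factory names
 * are assumptions and must be available in the local GStreamer installation):
 *
 *   if (!ges_generate_test_file_audio_video ("test-clip.ogv",
 *           "vorbisenc", "theoraenc", "oggmux", "smpte", "sine"))
 *     GST_ERROR ("could not generate the test clip");
 */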
Ejemplo n.º 23
0
static void
transcode_file (gchar * uri, gchar * outputuri, GstEncodingProfile * prof)
{
    GstElement *pipeline;
    GstElement *src;
    GstElement *ebin;
    GstElement *sink;
    GstBus *bus;
    GstCaps *profilecaps, *rescaps;
    GMainLoop *mainloop;

    g_print (" Input URI  : %s\n", uri);
    g_print (" Output URI : %s\n", outputuri);

    sink = gst_element_make_from_uri (GST_URI_SINK, outputuri, "sink", NULL);
    if (G_UNLIKELY (sink == NULL)) {
        g_print ("Can't create output sink, most likely invalid output URI !\n");
        return;
    }

    src = gst_element_factory_make ("uridecodebin", NULL);
    if (G_UNLIKELY (src == NULL)) {
        g_print ("Can't create uridecodebin for input URI, aborting!\n");
        return;
    }

    /* Figure out the streams that can be passed as-is to encodebin */
    g_object_get (src, "caps", &rescaps, NULL);
    rescaps = gst_caps_copy (rescaps);
    profilecaps = gst_encoding_profile_get_input_caps (prof);
    gst_caps_append (rescaps, profilecaps);
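    /* With the profile's input caps appended, uridecodebin stops decoding any
     * stream that already matches the target profile, so such streams can be
     * passed through to encodebin without re-encoding. */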

    /* Set properties */
    g_object_set (src, "uri", uri, "caps", rescaps, NULL);

    ebin = gst_element_factory_make ("encodebin", NULL);
    g_object_set (ebin, "profile", prof, NULL);

    g_signal_connect (src, "autoplug-continue", G_CALLBACK (autoplug_continue_cb),
                      ebin);
    g_signal_connect (src, "pad-added", G_CALLBACK (pad_added_cb), ebin);

    pipeline = gst_pipeline_new ("encoding-pipeline");

    gst_bin_add_many (GST_BIN (pipeline), src, ebin, sink, NULL);

    gst_element_link (ebin, sink);

    mainloop = g_main_loop_new (NULL, FALSE);

    bus = gst_pipeline_get_bus ((GstPipeline *) pipeline);
    gst_bus_add_signal_watch (bus);
    g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);

    if (gst_element_set_state (pipeline,
                               GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        g_print ("Failed to start the encoding\n");
        return;
    }

    g_main_loop_run (mainloop);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
}
gint main (gint argc, gchar *argv[])
{
#ifdef HAVE_X11
    XInitThreads();
#endif

    gtk_init (&argc, &argv);
    gst_init (&argc, &argv);

    GstElement* pipeline = gst_pipeline_new ("pipeline");

    //window that contains several ares where the video is drawn
    GtkWidget* window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    gtk_widget_set_size_request (window, 640, 240);
    gtk_window_move (GTK_WINDOW (window), 300, 10);
    gtk_window_set_title (GTK_WINDOW (window), "click on left, right or outside the main window to switch the drawing area");
    GdkGeometry geometry;
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry, GDK_HINT_MIN_SIZE);

    //window to control the states
    GtkWidget* window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    geometry.min_width = 1;
    geometry.min_height = 1;
    geometry.max_width = -1;
    geometry.max_height = -1;
    gtk_window_set_geometry_hints (GTK_WINDOW (window_control), window_control, &geometry, GDK_HINT_MIN_SIZE);
    gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
    gtk_window_move (GTK_WINDOW (window_control), 10, 10);
    GtkWidget* table = gtk_grid_new ();
    gtk_container_add (GTK_CONTAINER (window_control), table);

    //control state null
    GtkWidget* button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
    g_signal_connect (G_OBJECT (button_state_null), "clicked",
        G_CALLBACK (button_state_null_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_null, 0, 0, 1, 1);
    gtk_widget_show (button_state_null);

    //control state ready
    GtkWidget* button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
    g_signal_connect (G_OBJECT (button_state_ready), "clicked",
        G_CALLBACK (button_state_ready_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_ready, 0, 1, 1, 1);
    gtk_widget_show (button_state_ready);

    //control state paused
    GtkWidget* button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
    g_signal_connect (G_OBJECT (button_state_paused), "clicked",
        G_CALLBACK (button_state_paused_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_paused, 0, 2, 1, 1);
    gtk_widget_show (button_state_paused);

    //control state playing
    GtkWidget* button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
    g_signal_connect (G_OBJECT (button_state_playing), "clicked",
        G_CALLBACK (button_state_playing_cb), pipeline);
    gtk_grid_attach (GTK_GRID (table), button_state_playing, 0, 3, 1, 1);
    gtk_widget_show (button_state_playing);

    gtk_widget_show (table);
    gtk_widget_show (window_control);

    //configure the pipeline
    g_signal_connect(G_OBJECT(window), "delete-event", G_CALLBACK(destroy_cb), pipeline);

    GstElement* videosrc  = gst_element_factory_make ("videotestsrc", "videotestsrc");
    GstElement* videosink = gst_element_factory_make ("glimagesink", "glimagesink");

    gst_bin_add_many (GST_BIN (pipeline), videosrc, videosink, NULL);

    gboolean link_ok = gst_element_link_many(videosrc, videosink, NULL);
    if(!link_ok)
    {
        g_warning("Failed to link videosrc to videosink!\n") ;
        return -1;
    }

    //areas where the video is drawn
    GtkWidget* table_areas = gtk_grid_new ();
    gtk_container_add (GTK_CONTAINER (window), table_areas);
    GtkWidget* area_top_left = gtk_drawing_area_new();
    gtk_widget_add_events(area_top_left, GDK_BUTTON_PRESS_MASK);
    gtk_widget_set_size_request (area_top_left, 320, 240);
    gtk_grid_attach (GTK_GRID (table_areas), area_top_left, 0, 0, 1, 1);
    GtkWidget* area_top_right = gtk_drawing_area_new();
    gtk_widget_add_events(area_top_right, GDK_BUTTON_PRESS_MASK);
    gtk_widget_set_size_request (area_top_right, 320, 240);
    gtk_grid_attach (GTK_GRID (table_areas), area_top_right, 1, 0, 1, 1);

    gtk_widget_set_redraw_on_allocate (area_top_left, TRUE);
    gtk_widget_set_redraw_on_allocate (area_top_right, TRUE);
    gtk_widget_realize(area_top_left);
    gtk_widget_realize(area_top_right);

    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area_top_right, NULL);
    gst_bus_add_signal_watch (bus);
    g_signal_connect(bus, "message::error", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::warning", G_CALLBACK(end_stream_cb), pipeline);
    g_signal_connect(bus, "message::eos", G_CALLBACK(end_stream_cb), pipeline);
    gst_object_unref (bus);

    //needed when being in GST_STATE_READY, GST_STATE_PAUSED
    //or resizing/obscuring the window
    g_signal_connect(area_top_left, "draw", G_CALLBACK(expose_cb), videosink);
    g_signal_connect(area_top_left, "configure-event", G_CALLBACK(resize_cb), videosink);
    g_signal_connect(area_top_right, "draw", G_CALLBACK(expose_cb), videosink);
    g_signal_connect(area_top_right, "configure-event", G_CALLBACK(resize_cb), videosink);

    //switch the drawing area
    g_signal_connect(area_top_left, "button-press-event", G_CALLBACK(on_click_drawing_area), videosink);
    g_signal_connect(area_top_right, "button-press-event", G_CALLBACK(on_click_drawing_area), videosink);

    gtk_widget_show_all (window);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    gtk_main();

    return 0;
}
Ejemplo n.º 25
0
static void
_test_negotiation (const gchar * src_templ, const gchar * sink_templ,
    gint width, gint height, gint par_n, gint par_d)
{
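  /* Builds videotestsrc ! capsfilter(src_templ) ! videoscale !
   * capsfilter(sink_templ) ! fakesink and uses notify::caps on the second
   * capsfilter's sink pad to check that videoscale negotiates the expected
   * width, height and pixel-aspect-ratio. */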
  GstElement *pipeline;
  GstElement *src, *capsfilter1, *scale, *capsfilter2, *sink;
  GstBus *bus;
  GMainLoop *loop;
  GstCaps *caps;
  TestNegotiationData data = { 0, 0, 0, 0, FALSE, NULL };
  GstPad *pad;

  GST_DEBUG ("Running test for src templ caps '%s' and sink templ caps '%s'",
      src_templ, sink_templ);

  pipeline = gst_element_factory_make ("pipeline", "pipeline");
  fail_unless (pipeline != NULL);

  src = gst_element_factory_make ("videotestsrc", "src");
  fail_unless (src != NULL);
  g_object_set (G_OBJECT (src), "num-buffers", 1, NULL);

  capsfilter1 = gst_element_factory_make ("capsfilter", "filter1");
  fail_unless (capsfilter1 != NULL);
  caps = gst_caps_from_string (src_templ);
  fail_unless (caps != NULL);
  g_object_set (G_OBJECT (capsfilter1), "caps", caps, NULL);
  gst_caps_unref (caps);

  scale = gst_element_factory_make ("videoscale", "scale");
  fail_unless (scale != NULL);

  capsfilter2 = gst_element_factory_make ("capsfilter", "filter2");
  fail_unless (capsfilter2 != NULL);
  caps = gst_caps_from_string (sink_templ);
  fail_unless (caps != NULL);
  g_object_set (G_OBJECT (capsfilter2), "caps", caps, NULL);
  gst_caps_unref (caps);

  pad = gst_element_get_static_pad (capsfilter2, "sink");
  fail_unless (pad != NULL);
  g_signal_connect (pad, "notify::caps",
      G_CALLBACK (_test_negotiation_notify_caps), &data);
  gst_object_unref (pad);

  sink = gst_element_factory_make ("fakesink", "sink");
  fail_unless (sink != NULL);
  g_object_set (sink, "async", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, capsfilter1, scale, capsfilter2,
      sink, NULL);

  fail_unless (gst_element_link_pads_full (src, "src", capsfilter1, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (capsfilter1, "src", scale, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (scale, "src", capsfilter2, "sink",
          LINK_CHECK_FLAGS));
  fail_unless (gst_element_link_pads_full (capsfilter2, "src", sink, "sink",
          LINK_CHECK_FLAGS));

  loop = g_main_loop_new (NULL, FALSE);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  gst_bus_add_signal_watch (bus);

  data.loop = loop;
  data.width = width;
  data.height = height;
  data.par_n = par_n;
  data.par_d = par_d;
  data.ok = FALSE;

  g_signal_connect (bus, "message", G_CALLBACK (_test_negotiation_message),
      &data);

  gst_object_unref (bus);

  fail_unless (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);

  g_main_loop_run (loop);

  fail_unless (data.ok == TRUE);

  fail_unless (gst_element_set_state (pipeline,
          GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS);

  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
Ejemplo n.º 26
0
gint
main (gint argc, gchar ** argv)
{
    GstElement *pipeline, *src, *mxfdemux;
    GstBus *bus;
    GtkWidget *scrolled_window, *treeview;

    if (argc < 2) {
        g_print ("usage: %s MXF-FILE\n", argv[0]);
        return -1;
    }

    if (!g_thread_supported ())
        g_thread_init (NULL);

    gst_init (NULL, NULL);
    gtk_init (NULL, NULL);

    pipeline = gst_pipeline_new ("pipeline");

    src = gst_element_factory_make ("filesrc", "src");
    g_object_set (G_OBJECT (src), "location", argv[1], NULL);

    mxfdemux = gst_element_factory_make ("mxfdemux", "mxfdemux");
    g_signal_connect (mxfdemux, "pad-added", G_CALLBACK (on_pad_added), NULL);

    if (!src || !mxfdemux) {
        g_error ("Unable to create all elements");
        return -2;
    }

    gst_bin_add_many (GST_BIN (pipeline), src, mxfdemux, NULL);
    if (!gst_element_link_many (src, mxfdemux, NULL)) {
        g_error ("Failed to link elements");
        return -3;
    }

    bus = gst_element_get_bus (pipeline);
    gst_bus_add_signal_watch (bus);
    g_signal_connect (bus, "message", G_CALLBACK (on_message), NULL);
    gst_object_unref (bus);

    window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
    gtk_window_set_default_size (GTK_WINDOW (window), 640, 480);
    g_signal_connect (window, "delete-event", gtk_main_quit, NULL);

    scrolled_window = gtk_scrolled_window_new (NULL, NULL);

    treestore = gtk_tree_store_new (1, G_TYPE_STRING, NULL);
    treeview = gtk_tree_view_new_with_model (GTK_TREE_MODEL (treestore));

    gtk_tree_view_append_column (GTK_TREE_VIEW (treeview),
                                 gtk_tree_view_column_new_with_attributes ("Element",
                                         gtk_cell_renderer_text_new (), "text", 0, NULL));

    gtk_container_add (GTK_CONTAINER (scrolled_window), treeview);
    gtk_container_add (GTK_CONTAINER (window), scrolled_window);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    gtk_main ();

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;
}
Ejemplo n.º 27
0
static void
build_pipeline (SjExtractor *extractor)
{
  SjExtractorPrivate *priv;
  GstBus *bus;

  g_return_if_fail (SJ_IS_EXTRACTOR (extractor));

  priv = extractor->priv;

  if (priv->pipeline != NULL) {
    gst_object_unref (GST_OBJECT (priv->pipeline));
  }
  priv->pipeline = gst_pipeline_new ("pipeline");
  bus = gst_element_get_bus (priv->pipeline);
  gst_bus_add_signal_watch (bus);

  g_signal_connect (G_OBJECT (bus), "message::error", G_CALLBACK (error_cb), extractor);

  /* Read from CD */
  priv->cdsrc = gst_element_make_from_uri (GST_URI_SRC, "cdda://1", "cd_src", NULL);
  if (priv->cdsrc == NULL) {
    g_set_error (&priv->construct_error,
                 SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
                 _("Could not create GStreamer CD reader"));
    return;
  }

  g_object_set (G_OBJECT (priv->cdsrc), "device", priv->device_path, NULL);
  if (g_object_class_find_property (G_OBJECT_GET_CLASS (priv->cdsrc), "paranoia-mode")) {
	  g_object_set (G_OBJECT (priv->cdsrc), "paranoia-mode", priv->paranoia_mode, NULL);
  }

  /* Get the track format for seeking later */
  priv->track_format = gst_format_get_by_nick ("track");
  g_assert (priv->track_format != 0);

  /* Encode */
  priv->encodebin = build_encoder (extractor);
  if (priv->encodebin == NULL) {
    g_set_error (&priv->construct_error,
                 SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
                 _("Could not create GStreamer encoders for %s"),
                 gst_encoding_profile_get_name (priv->profile));
    return;
  }
  /* Connect to the eos so we know when its finished */
  g_signal_connect (bus, "message::eos", G_CALLBACK (eos_cb), extractor);

  /* Write to disk */
  priv->filesink = gst_element_factory_make (FILE_SINK, "file_sink");
  if (priv->filesink == NULL) {
    g_set_error (&priv->construct_error,
                 SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
                 _("Could not create GStreamer file output"));
    return;
  }
#if 0
  g_signal_connect (G_OBJECT (priv->filesink), "allow-overwrite", G_CALLBACK (just_say_yes), extractor);
#endif

  /* Add the elements to the pipeline */
  gst_bin_add_many (GST_BIN (priv->pipeline), priv->cdsrc, priv->encodebin, priv->filesink, NULL);

  /* Link it all together */
  if (!gst_element_link_many (priv->cdsrc, priv->encodebin, priv->filesink, NULL)) {
    g_set_error (&priv->construct_error,
                 SJ_ERROR, SJ_ERROR_INTERNAL_ERROR,
                 _("Could not link pipeline"));
    return;
  }

  priv->rebuild_pipeline = FALSE;
}
Ejemplo n.º 28
0
gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline;

/*** block d  from ../../../docs/manual/advanced-position.xml ***/
  GstStateChangeReturn ret;
  GMainLoop *loop;
  GError *err = NULL;
  GstBus *bus;
  gchar *l;

  /* init */
  gst_init (&argc, &argv);

  /* args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* build pipeline, the easy way */
  l = g_strdup_printf ("filesrc location=\"%s\" ! oggdemux ! vorbisdec ! "
		       "audioconvert ! audioresample ! alsasink",
		       argv[1]);
  pipeline = gst_parse_launch (l, &err);
  if (pipeline == NULL || err != NULL) {
    g_printerr ("Cannot build pipeline: %s\n", err->message);
    g_error_free (err);
    g_free (l);
    if (pipeline)
      gst_object_unref (pipeline);
    return -1;
  }
  g_free (l);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (my_bus_message_cb), loop);
  gst_object_unref (bus);

  /* play */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
    g_error ("Failed to set pipeline to PLAYING.\n");

/*** block e  from ../../../docs/manual/advanced-position.xml ***/
  /* run pipeline */
  g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);
  g_main_loop_run (loop);

/*** block f  from ../../../docs/manual/advanced-position.xml ***/
  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;

/*** block g  from ../../../docs/manual/advanced-position.xml ***/
}
Ejemplo n.º 29
0
gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline;
  GstElement *uload, *filter, *sink;
  GstElement *sourcebin;
  GstBus *bus;
  GError *error = NULL;

  GtkWidget *window;
  GtkWidget *screen;
  GtkWidget *vbox;
  GtkWidget *hbox;
  GtkWidget *play, *pause, *null, *ready;

  gchar **source_desc_array = NULL;
  gchar *source_desc = NULL;

  GOptionContext *context;
  GOptionEntry options[] = {
    {"source-bin", 's', 0, G_OPTION_ARG_STRING_ARRAY, &source_desc_array,
        "Use a custom source bin description (gst-launch style)", NULL}
    ,
    {"method", 'm', 0, G_OPTION_ARG_INT, &method,
        "1 for gstdifferencematte, 2 for gloverlay", "M"}
    ,
    {"delay", 'd', 0, G_OPTION_ARG_INT, &delay,
          "Wait N seconds before to send the image to gstreamer (useful with differencematte)",
        "N"}
    ,
    {NULL}
  };

  g_thread_init (NULL);

  context = g_option_context_new (NULL);
  g_option_context_add_main_entries (context, options, NULL);
  g_option_context_add_group (context, gst_init_get_option_group ());
  g_option_context_add_group (context, gtk_get_option_group (TRUE));
  if (!g_option_context_parse (context, &argc, &argv, &error)) {
    g_print ("Inizialization error: %s\n", GST_STR_NULL (error->message));
    return -1;
  }
  g_option_context_free (context);

  if (source_desc_array != NULL) {
    source_desc = g_strjoinv (" ", source_desc_array);
    g_strfreev (source_desc_array);
  }
  if (source_desc == NULL) {
    source_desc =
        g_strdup
        ("videotestsrc ! video/x-raw-rgb, width=352, height=288 ! identity");
  }

  sourcebin =
      gst_parse_bin_from_description (source_desc, TRUE, &error);
  g_free (source_desc);
  if (error) {
    g_print ("Error while parsing source bin description: %s\n",
        GST_STR_NULL (error->message));
    return -1;
  }

  g_set_application_name ("gst-gl-effects test app");

  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_container_set_border_width (GTK_CONTAINER (window), 3);

  pipeline = gst_pipeline_new ("pipeline");

  uload = gst_element_factory_make ("glupload", "glu");
  if (method == 2) {
    filter = gst_element_factory_make ("gloverlay", "flt");
  } else {
    filter = gst_element_factory_make ("gldifferencematte", "flt");
  }
  sink = gst_element_factory_make ("glimagesink", "glsink");

  gst_bin_add_many (GST_BIN (pipeline), sourcebin, uload, filter, sink, NULL);

  if (!gst_element_link_many (sourcebin, uload, filter, sink, NULL)) {
    g_print ("Failed to link one or more elements!\n");
    return -1;
  }

  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (destroy_cb), pipeline);
  g_signal_connect (G_OBJECT (window), "destroy-event",
      G_CALLBACK (destroy_cb), pipeline);

  screen = gtk_drawing_area_new ();

  gtk_widget_set_size_request (screen, 640, 480);       // 500 x 376

  vbox = gtk_vbox_new (FALSE, 2);

  gtk_box_pack_start (GTK_BOX (vbox), screen, TRUE, TRUE, 0);

  hbox = gtk_hbox_new (FALSE, 0);

  play = gtk_button_new_with_label ("PLAY");

  g_signal_connect (G_OBJECT (play), "clicked", G_CALLBACK (play_cb), pipeline);

  pause = gtk_button_new_with_label ("PAUSE");

  g_signal_connect (G_OBJECT (pause), "clicked",
      G_CALLBACK (pause_cb), pipeline);

  null = gtk_button_new_with_label ("NULL");

  g_signal_connect (G_OBJECT (null), "clicked", G_CALLBACK (null_cb), pipeline);

  ready = gtk_button_new_with_label ("READY");

  g_signal_connect (G_OBJECT (ready), "clicked",
      G_CALLBACK (ready_cb), pipeline);

  gtk_box_pack_start (GTK_BOX (hbox), null, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), ready, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), play, TRUE, TRUE, 0);
  gtk_box_pack_start (GTK_BOX (hbox), pause, TRUE, TRUE, 0);

  gtk_box_pack_start (GTK_BOX (vbox), hbox, FALSE, FALSE, 0);

  gtk_container_add (GTK_CONTAINER (window), vbox);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
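  /* create_window () runs as the bus sync handler so the video sink can be
   * handed the drawing area's window handle as soon as it asks for one
   * (presumably by handling the prepare-xwindow-id message). */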
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, screen);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (message_cb), pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (message_cb), pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (message_cb), pipeline);
  gst_object_unref (bus);
  g_signal_connect (screen, "expose-event", G_CALLBACK (expose_cb), sink);
  g_signal_connect (screen, "realize", G_CALLBACK (realize_cb), pipeline);

  gtk_drag_dest_set (screen, GTK_DEST_DEFAULT_ALL, NULL, 0, GDK_ACTION_COPY);
  gtk_drag_dest_add_uri_targets (screen);

  g_signal_connect (screen, "drag-data-received",
      G_CALLBACK (on_drag_data_received), filter);

  gtk_widget_show_all (GTK_WIDGET (window));

  gtk_main ();

  return 0;
}
Ejemplo n.º 30
0
void  _video_send_init_gstreamer(NiceAgent *magent, guint stream_id)
{
	//GstElement *pipeline, *rpicamsrc, *capsfilter, *h264parse, *rtph264pay, *nicesink;
	GstElement *rpicamsrc, *capsfilter, *h264parse, *rtph264pay, *nicesink;
	//GstBus *bus;
	GstMessage *msg;
	GstStateChangeReturn ret;

	/* Initialize GStreamer */
  	gst_init (NULL, NULL);

	rpicamsrc = gst_element_factory_make ("rpicamsrc", NULL);
	capsfilter = gst_element_factory_make ("capsfilter", NULL);
	h264parse = gst_element_factory_make ("h264parse", NULL);
	rtph264pay = gst_element_factory_make ("rtph264pay", NULL);
	nicesink = gst_element_factory_make ("nicesink", NULL);
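	// Overall chain: rpicamsrc -> capsfilter (H.264, 640x480 @ 25 fps) ->
	// h264parse -> rtph264pay -> nicesink (RTP sent over the libnice
	// agent's stream/component)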

	//rpicamsrc
	g_object_set (rpicamsrc, "bitrate", 300000, NULL);
	g_object_set (rpicamsrc, "rotation", 180, NULL);
	//g_object_set (rpicamsrc, "exposure-mode", 9, NULL);
	g_object_set (rpicamsrc, "video-stabilisation", TRUE, NULL);
	g_object_set (capsfilter, "caps", gst_caps_from_string("video/x-h264,width=640,height=480,framerate=25/1"), NULL);
	//rtph264pay
	g_object_set (rtph264pay, "pt", 96, NULL);
	g_object_set (rtph264pay, "config-interval", 1, NULL);

	//Set properties
	g_object_set (nicesink, "agent", magent, NULL);
	g_object_set (nicesink, "stream", stream_id, NULL);
	g_object_set (nicesink, "component", 1, NULL);

	/// Create the empty pipeline
	//pipeline = gst_pipeline_new ("test-pipeline");
	RpiData_SendVideo->pipeline = gst_pipeline_new ("send-video-pipeline");

	if (!RpiData_SendVideo->pipeline || !rpicamsrc ||!capsfilter || !h264parse || !rtph264pay|| !nicesink)
	{
		g_printerr ("Not all elements could be created.\n");
		return;
	}

	/// Build the pipeline
	gst_bin_add_many (GST_BIN (RpiData_SendVideo->pipeline), rpicamsrc, capsfilter, h264parse, rtph264pay, nicesink, NULL);
	if (gst_element_link_many ( rpicamsrc, capsfilter, h264parse, rtph264pay, nicesink,  NULL) != TRUE)
	{
		g_printerr ("Elements could not be linked.\n");
		gst_object_unref (RpiData_SendVideo->pipeline);
		return;
	}

	RpiData_SendVideo->bus = gst_element_get_bus (RpiData_SendVideo->pipeline);
  	gst_bus_enable_sync_message_emission (RpiData_SendVideo->bus);
  	gst_bus_add_signal_watch (RpiData_SendVideo->bus);

	g_signal_connect (RpiData_SendVideo->bus, "message::error",
      		(GCallback) on_error_video, NULL);

	// Start playing
	ret = gst_element_set_state (RpiData_SendVideo->pipeline, GST_STATE_PLAYING);

	if (ret == GST_STATE_CHANGE_FAILURE)
	{
		g_printerr ("Unable to set the pipeline to the playing state.\n");
		gst_object_unref (RpiData_SendVideo->pipeline);
		return;
	}

}