Example #1
gint
main (gint   argc,
      gchar *argv[])
{
  GMainLoop *loop;
  GstElement *typefind, *realsink;
  GstElement *filesrc, *typefind1;
  GstBus *bus;
  GError *err = NULL;
  gchar *p;
  
  xmlfile = "manual_dynamic";
  std_log(LOG_FILENAME_LINE, "Test Started manual_dynamic");

  /* init GStreamer and ourselves */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);
  init_factories ();

  /* args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    std_log(LOG_FILENAME_LINE, "Test Failed argument need to be passed ");
    create_xml(1); 
    exit (-1);
  }

  /* pipeline */
  // alternative: construct the same pipeline with gst_parse_launch
  /*
  p = g_strdup_printf ("filesrc location=\"%s\" ! typefind name=tf", argv[1]);  
  std_log(LOG_FILENAME_LINE, "parse launch Start");
  pipeline = gst_parse_launch (p, &err);
  std_log(LOG_FILENAME_LINE, "parse launch Done");
  g_free (p);

  if (err) {
    std_log(LOG_FILENAME_LINE, "Could not construct pipeline");
    create_xml(1);
    g_error ("Could not construct pipeline: %s", err->message);
    g_error_free (err);
    return -1;
  }
  */
  
  filesrc = gst_element_factory_make ("filesrc", "src");
  g_assert (filesrc);  
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  
  typefind1 = gst_element_factory_make ("typefind", "tf");
  g_assert (typefind1);  
  
  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  
  gst_bin_add_many (GST_BIN (pipeline), filesrc, typefind1, NULL);

  /* link the elements */
  gst_element_link_many (filesrc, typefind1, NULL);
  
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  if(!bus)
      {
      std_log(LOG_FILENAME_LINE, "failed to get bus");
      create_xml(1);
      return -1;
      }
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);

  typefind = gst_bin_get_by_name (GST_BIN (pipeline), "tf");
  if(!typefind)
      {
      std_log(LOG_FILENAME_LINE, "Failed to create typefind");
      create_xml(1);
      return -1;
      }
  g_signal_connect (typefind, "have-type", G_CALLBACK (cb_typefound), NULL);
  gst_object_unref (GST_OBJECT (typefind));
  
  std_log(LOG_FILENAME_LINE, "Element Create Start"); 
  /* note: 'audiosink' actually holds an audioconvert; the typefind branch
     gets linked to it once the stream type is known */
  audiosink = gst_element_factory_make ("audioconvert", "aconv");
  realsink = gst_element_factory_make ("filesink", "audiosink");
  std_log(LOG_FILENAME_LINE, "Element Create Done");
  if(!audiosink || !realsink)
      {
      std_log(LOG_FILENAME_LINE, "Failed to create audiosink or realsink");
      create_xml(1);
      return -1;
      }
  std_log(LOG_FILENAME_LINE, "Create out.txt Start");
  g_object_set(realsink,"location","c:\\data\\out.txt",NULL);
  std_log(LOG_FILENAME_LINE, "Create out.txt Done");
  gst_bin_add_many (GST_BIN (pipeline), audiosink, realsink, NULL);
  std_log(LOG_FILENAME_LINE, "Element Link Start");
  gst_element_link (audiosink, realsink);
  std_log(LOG_FILENAME_LINE, "Element Link Done");
  std_log(LOG_FILENAME_LINE, "Set PLAY State");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  std_log(LOG_FILENAME_LINE, "Set Play State Done");

  /* run */
  std_log(LOG_FILENAME_LINE, "main loop run Start");
  g_main_loop_run (loop);
  std_log(LOG_FILENAME_LINE, "main loop run Done");

  /* exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
  return 0;
}
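The callbacks above (my_bus_callback, cb_typefound) are defined elsewhere. A minimal sketch of what the "have-type" handler could look like, assuming it links the typefind branch to the global 'audiosink' created in main() (only the signature is fixed by the signal; the body is an assumption):

/* Hypothetical sketch of the have-type handler. */
static void
cb_typefound (GstElement *typefind, guint probability,
              GstCaps *caps, gpointer data)
{
  gchar *type = gst_caps_to_string (caps);

  g_print ("Media type %s found, probability %d%%\n", type, probability);
  g_free (type);

  /* link the typefind element to the already-added conversion branch */
  gst_element_link (typefind, audiosink);
}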
Example #2
gint
main (gint argc, gchar * argv[])
{
  LocalState state;
  GtkWidget *area, *combo, *w;
  const gchar *uri;

  XInitThreads ();

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  if (argc < 2) {
    g_print ("Usage: 3dvideo <uri-to-play>\n");
    return 1;
  }

  uri = argv[1];

  GstElement *pipeline = gst_element_factory_make ("playbin", NULL);
  GstBin *sinkbin = (GstBin *) gst_parse_bin_from_description ("glupload ! glcolorconvert ! glviewconvert name=viewconvert ! glimagesink name=sink", TRUE, NULL);
#if USE_GLCONVERT_FOR_INPUT
  GstElement *glconvert = gst_bin_get_by_name (sinkbin, "viewconvert");
#endif
  GstElement *videosink = gst_bin_get_by_name (sinkbin, "sink");

  g_return_val_if_fail (pipeline != NULL, 1);
  g_return_val_if_fail (videosink != NULL, 1);

  /* Get defaults */
  g_object_get (pipeline, "video-multiview-mode", &state.in_mode,
      "video-multiview-flags", &state.in_flags, NULL);
  gst_child_proxy_get (GST_CHILD_PROXY (videosink), "sink::output-multiview-mode", &state.out_mode,
      "sink::output-multiview-flags", &state.out_flags, NULL);

  detect_mode_from_uri (&state, uri);

  g_object_set (G_OBJECT (pipeline), "video-sink", sinkbin, NULL);
  g_object_set (G_OBJECT (pipeline), "uri", uri, NULL);

#if USE_GLCONVERT_FOR_INPUT
  g_object_set (G_OBJECT (glconvert), "input-mode-override", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (glconvert), "input-flags-override", state.in_flags,
      NULL);
#else
  g_object_set (G_OBJECT (pipeline), "video-multiview-mode", state.in_mode,
      NULL);
  g_object_set (G_OBJECT (pipeline), "video-multiview-flags", state.in_flags,
      NULL);
#endif

  /* Connect to bus for signal handling */
  GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline);

  gst_element_set_state (pipeline, GST_STATE_READY);

  area = gtk_drawing_area_new ();
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, area, NULL);
  gst_object_unref (bus);

  /* Toplevel window */
  GtkWidget *window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 800, 600);
  gtk_window_set_title (GTK_WINDOW (window), "Stereoscopic video demo");
  GdkGeometry geometry;
  geometry.min_width = 1;
  geometry.min_height = 1;
  geometry.max_width = -1;
  geometry.max_height = -1;
  gtk_window_set_geometry_hints (GTK_WINDOW (window), window, &geometry,
      GDK_HINT_MIN_SIZE);

  GtkWidget *vbox = gtk_box_new (GTK_ORIENTATION_VERTICAL, 2);
  gtk_container_add (GTK_CONTAINER (window), vbox);

  /* area where the video is drawn */
  gtk_box_pack_start (GTK_BOX (vbox), area, TRUE, TRUE, 0);

  /* Buttons to control the pipeline state */
  GtkWidget *table = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (vbox), table);

  GtkWidget *button_state_ready = gtk_button_new_with_label ("Stop");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_ready, 1, 0, 1, 1);
  gtk_widget_show (button_state_ready);

  //control state paused
  GtkWidget *button_state_paused = gtk_button_new_with_label ("Pause");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_paused, 2, 0, 1, 1);
  gtk_widget_show (button_state_paused);

  //control state playing
  GtkWidget *button_state_playing = gtk_button_new_with_label ("Play");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (table), button_state_playing, 3, 0, 1, 1);
  gtk_widget_show (button_state_playing);

  w = gst_mview_widget_new (FALSE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING,
          state.in_mode));
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), glconvert);
#else
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_input_mode), pipeline);
#endif

  g_object_set (G_OBJECT (w), "flags", state.in_flags, NULL);
#if USE_GLCONVERT_FOR_INPUT
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), glconvert);
#else
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (input_flags_changed), pipeline);
#endif
  gtk_container_add (GTK_CONTAINER (vbox), w);

  w = gst_mview_widget_new (TRUE);
  combo = GST_MVIEW_WIDGET (w)->mode_selector;
  gtk_combo_box_set_active_id (GTK_COMBO_BOX (combo),
      enum_value_to_nick (GST_TYPE_VIDEO_MULTIVIEW_MODE, state.out_mode));
  g_signal_connect (G_OBJECT (combo), "changed",
      G_CALLBACK (set_mview_output_mode), videosink);

  g_object_set (G_OBJECT (w), "flags", state.out_flags, NULL);
  g_signal_connect (G_OBJECT (w), "notify::flags",
      G_CALLBACK (output_flags_changed), videosink);
  g_signal_connect (G_OBJECT (w), "notify::downmix-mode",
      G_CALLBACK (downmix_method_changed), videosink);
  gtk_container_add (GTK_CONTAINER (vbox), w);

  //quit cleanly when the window is closed
  g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb),
      pipeline);

  gtk_widget_realize (area);

  /* Redraw needed when paused or stopped (PAUSED or READY) */
  g_signal_connect (area, "draw", G_CALLBACK (draw_cb), videosink);
  g_signal_connect (area, "configure-event", G_CALLBACK (resize_cb), videosink);

  gtk_widget_show_all (window);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  gtk_main ();

  return 0;
}
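The handlers referenced above (end_stream_cb, create_window, draw_cb, resize_cb and the button callbacks) are not part of this snippet. A plausible sketch of the bus handler wired to message::error/warning/eos, assuming it simply reports errors and quits (body assumed; the signature is fixed by the bus signals):

static void
end_stream_cb (GstBus *bus, GstMessage *message, GstElement *pipeline)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *debug = NULL;

    gst_message_parse_error (message, &err, &debug);
    g_print ("Error: %s\n", err->message);
    g_clear_error (&err);
    g_free (debug);
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gtk_main_quit ();
}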
Example #3
static int gst_pipeline_init(gst_app_t *app)
{
	GstBus *bus;
	GstStateChangeReturn state_ret;
	
	GError *error = NULL;

	gst_init(NULL, NULL);

//	app->pipeline = (GstPipeline*)gst_parse_launch("appsrc name=mysrc is-live=true block=false max-latency=1000000 ! h264parse ! vpudec low-latency=true framedrop=true framedrop-level-mask=0x200 ! mfw_v4lsink max-lateness=1000000000 sync=false async=false", &error);

	app->pipeline = (GstPipeline*)gst_parse_launch("appsrc name=mysrc is-live=true block=false max-latency=1000000 ! h264parse ! vpudec low-latency=true framedrop=true framedrop-level-mask=0x200 ! mfw_isink axis-left=0 axis-top=0 disp-width=800 disp-height=480 max-lateness=1000000000 sync=false async=false", &error);
		
	if (error != NULL) {
		printf("could not construct pipeline: %s\n", error->message);
		g_clear_error (&error);	
		return -1;
	}
	
	bus = gst_pipeline_get_bus(app->pipeline);
	gst_bus_add_watch(bus, (GstBusFunc)bus_callback, app);
	gst_object_unref(bus);

	app->src = (GstAppSrc*)gst_bin_get_by_name (GST_BIN (app->pipeline), "mysrc");
	
	gst_app_src_set_stream_type(app->src, GST_APP_STREAM_TYPE_STREAM);

	g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
		
	g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);

	aud_pipeline = gst_parse_launch("appsrc name=audsrc is-live=true block=false max-latency=1000000 ! audio/x-raw-int, signed=true, endianness=1234, depth=16, width=16, rate=48000, channels=2 ! alsasink ",&error);

	if (error != NULL) {
		printf("could not construct pipeline: %s\n", error->message);
		g_clear_error (&error);	
		return -1;
	}	

	aud_src = gst_bin_get_by_name (GST_BIN (aud_pipeline), "audsrc");
	
	gst_app_src_set_stream_type((GstAppSrc *)aud_src, GST_APP_STREAM_TYPE_STREAM);


	au1_pipeline = gst_parse_launch("appsrc name=au1src is-live=true block=false max-latency=1000000 ! audio/x-raw-int, signed=true, endianness=1234, depth=16, width=16, rate=16000, channels=1 ! alsasink ",&error);

	if (error != NULL) {
		printf("could not construct pipeline: %s\n", error->message);
		g_clear_error (&error);	
		return -1;
	}	

	au1_src = gst_bin_get_by_name (GST_BIN (au1_pipeline), "au1src");
	
	gst_app_src_set_stream_type((GstAppSrc *)au1_src, GST_APP_STREAM_TYPE_STREAM);



	mic_pipeline = gst_parse_launch("alsasrc name=micsrc ! audioconvert ! audio/x-raw-int, signed=true, endianness=1234, depth=16, width=16, channels=1, rate=16000 ! queue ! appsink name=micsink async=false emit-signals=true blocksize=8192",&error);
	
	if (error != NULL) {
		printf("could not construct mic pipeline: %s\n", error->message);
		g_clear_error (&error);	
		return -1;
	}
	
	mic_sink = gst_bin_get_by_name (GST_BIN (mic_pipeline), "micsink");

	g_object_set(G_OBJECT(mic_sink), "throttle-time", 3000000, NULL);
		
	g_signal_connect(mic_sink, "new-buffer", G_CALLBACK(read_mic_data), NULL);
	
	state_ret = gst_element_set_state (mic_pipeline, GST_STATE_READY);

	return 0;

}
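start_feed and stop_feed are the usual appsrc flow-control callbacks. A minimal sketch, assuming gst_app_t carries a flag that the feeder thread polls (the field name is hypothetical):

/* "need-data": appsrc can take more buffers - let the feeder run. */
static void start_feed(GstElement *source, guint size, gst_app_t *app)
{
	app->feeding = TRUE;   /* hypothetical flag checked by the feeder */
}

/* "enough-data": appsrc's internal queue is full - pause the feeder. */
static void stop_feed(GstElement *source, gst_app_t *app)
{
	app->feeding = FALSE;
}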
Example #4
GstElement * ofGstUtils::getGstElementByName(const string & name) const{
	return gst_bin_get_by_name(GST_BIN(gstPipeline), name.c_str());
}
}
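Note that gst_bin_get_by_name() returns a full (owned) reference, so callers of this helper must unref the element when done. A minimal usage sketch in plain C (the element name "ofappsink" is a placeholder):

GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "ofappsink");
if (sink != NULL) {
  /* ... inspect or configure the element ... */
  gst_object_unref (sink);
}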
Example #5
/**
 * Set up the Gstreamer pipeline. Appsrc gets raw frames, and appsink takes
 * encoded frames.
 *
 * The pipeline looks like this:
 *
 * <pre>
 *  .--------.   .-----------.   .----------.
 *  | appsrc |   |  x264enc  |   | appsink  |
 *  |   .----|   |----.  .---|   |----.     |
 *  |   |src |-->|sink|  |src|-->|sink|-----+-->handoff
 *  |   '----|   |----'  '---|   |----'     |   handler
 *  '--------'   '-----------'   '----------'
 * </pre>
 */
static int gst_encoder_init(struct videnc_state *st, int width, int height,
			    int framerate, int bitrate)
{
	GError* gerror = NULL;
	char pipeline[1024];
	int err = 0;

	gst_encoder_close(st);

	snprintf(pipeline, sizeof(pipeline),
	 "appsrc name=source is-live=TRUE block=TRUE do-timestamp=TRUE ! "
	 "videoparse width=%d height=%d format=i420 framerate=%d/1 ! "
	 "x264enc byte-stream=TRUE rc-lookahead=0"
	 " sync-lookahead=0 bitrate=%d ! "
	 "appsink name=sink emit-signals=TRUE drop=TRUE",
	 width, height, framerate, bitrate / 1000 /* kbit/s */);

	debug("gst_video: format: yu12 = yuv420p = i420\n");

	/* Initialize pipeline. */
	st->pipeline = gst_parse_launch(pipeline, &gerror);
	if (gerror) {
		warning("gst_video: launch error: %s: %s\n",
			gerror->message, pipeline);
		err = gerror->code;
		g_error_free(gerror);
		goto out;
	}

	st->source = gst_bin_get_by_name(GST_BIN(st->pipeline), "source");
	st->sink   = gst_bin_get_by_name(GST_BIN(st->pipeline), "sink");
	if (!st->source || !st->sink) {
		warning("gst_video: failed to get source or sink"
			" pipeline elements\n");
		err = ENOMEM;
		goto out;
	}

	/* Configure appsource */
	st->need_data_handler = g_signal_connect(st->source, "need-data",
				 G_CALLBACK(internal_appsrc_start_feed), st);
	st->enough_data_handler = g_signal_connect(st->source, "enough-data",
				   G_CALLBACK(internal_appsrc_stop_feed), st);

	/* Configure appsink. */
	st->new_buffer_handler = g_signal_connect(st->sink, "new-buffer",
				  G_CALLBACK(internal_appsink_new_buffer), st);

	/********************* Misc **************************/

	/* Bus watch */
	st->bus = gst_pipeline_get_bus(GST_PIPELINE(st->pipeline));

	/********************* Thread **************************/

	/* Synchronization primitives. */
	pthread_mutex_init(&st->mutex, NULL);
	pthread_cond_init(&st->wait, NULL);
	st->bwait = FALSE;

	err = gst_element_set_state(st->pipeline, GST_STATE_PLAYING);
	if (GST_STATE_CHANGE_FAILURE == err) {
		g_warning("set state returned GST_STATE_CHANGE_FAILUER\n");
	}

	/* Launch thread with gstreamer loop. */
	st->run = true;
	err = pthread_create(&st->tid, NULL, internal_thread, st);
	if (err) {
		st->run = false;
		goto out;
	}

	st->gst_inited = true;

 out:
	return err;
}
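The "new-buffer" signal used above belongs to the GStreamer 0.10 appsink API (1.0 renamed it to "new-sample"). A sketch of what the handler might do, assuming the encoded buffer is handed to a packetizer; the real internal_appsink_new_buffer is not shown here:

static void internal_appsink_new_buffer(GstElement *sink, gpointer arg)
{
	struct videnc_state *st = arg;
	GstBuffer *buf;

	/* 0.10 API: pull the encoded buffer out of the appsink */
	buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
	if (!buf)
		return;

	/* ... packetize GST_BUFFER_DATA(buf) / GST_BUFFER_SIZE(buf) via st ... */

	gst_buffer_unref(buf);
}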
Example #6
static jboolean process_audio (GstElement *source, JNIEnv *env, jobject header)
{
  /* will contain the properties we need to put into the given GstHeader */
  AudioProperties *properties = NULL;
  
  /* GStreamer elements */
  GstElement *pipeline = NULL;
  GstElement *decoder = NULL;
  
  GstElement *typefind = NULL;
  
  GstStateChangeReturn res;

  jboolean result = JNI_FALSE;
  
  properties = (AudioProperties *) g_malloc0 (sizeof (AudioProperties));
  if (properties == NULL)
    {
      return result;
    }
  reset_properties(properties);

  /* 
   * Create the decoder element; this will decode the stream and retrieve
   * its properties.
   * We connect a signal to this element to be informed when it is done
   * decoding the stream and to get the needed information about the
   * audio file.
   */
  decoder = gst_element_factory_make ("decodebin", "decoder");
  if (decoder == NULL)
    {
      free_properties(properties);
      return result;
    }
  
  /* now, we create a pipeline and fill it with the other elements */
  pipeline = gst_pipeline_new ("pipeline");
  if (pipeline == NULL)
    {
      gst_object_unref (GST_OBJECT (decoder));
      free_properties(properties);   
      return result;
    }
 
  g_signal_connect (decoder, "new-decoded-pad", G_CALLBACK (new_decoded_pad),
                    pipeline);
  g_signal_connect (G_OBJECT (decoder), "element-added",
                    G_CALLBACK (element_added), properties);
  
  /*
   * we get the typefind from the decodebin to catch the additional properties
   * that the decodebin does not expose to us
   */
  typefind = gst_bin_get_by_name (GST_BIN (decoder), "typefind");
  if (typefind != NULL)
    {
      /* 
       * NOTE: not checking for failure here is deliberate; we can live
       * without the typefind, only our stream detection will not be as
       * accurate as we would like. Anyway, if this fails, there is some
       * problem, probably a memory error.
       */
       g_signal_connect (G_OBJECT (typefind), "have-type",
                         G_CALLBACK (typefind_callback), properties);
    }
  
  gst_bin_add_many (GST_BIN (pipeline), source, decoder, NULL);
  gst_element_link (source, decoder);
  
  /* 
   * now, we set the pipeline playing state to pause and traverse it
   * to get the info we need.
   */
   
  res = gst_element_set_state (pipeline, GST_STATE_PAUSED);
  if (res == GST_STATE_CHANGE_FAILURE)
    {
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));
      
      free_properties(properties);
      
      return result;
    }
  
  res = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
  if (res != GST_STATE_CHANGE_SUCCESS)
    {
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));
      
      free_properties(properties);
      
      return result;
    }
  
  if (fill_info (decoder, properties))
    {
      result = set_strings (env, properties, header);
    }
 
  /* free stuff */
  gst_element_set_state (pipeline, GST_STATE_NULL);
   
  free_properties (properties);
  
  gst_object_unref (GST_OBJECT (pipeline));
 
  return result;
}
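A plausible shape for the "new-decoded-pad" handler connected above (a GStreamer 0.10 decodebin signal; the body is an assumption): each decoded stream is terminated in a fakesink so the pipeline can preroll while the properties are probed.

static void
new_decoded_pad (GstElement *decoder, GstPad *pad, gboolean last,
                 gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);
  GstElement *fakesink = gst_element_factory_make ("fakesink", NULL);
  GstPad *sinkpad;

  gst_bin_add (GST_BIN (pipeline), fakesink);
  sinkpad = gst_element_get_static_pad (fakesink, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
  gst_element_sync_state_with_parent (fakesink);
}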
Example #7
bool ofGstVideoUtils::setPipeline(string pipeline, ofPixelFormat pixelFormat, bool isStream, int w, int h){
	internalPixelFormat = pixelFormat;
#ifndef OF_USE_GST_GL
	string caps;
#if GST_VERSION_MAJOR==0
	switch(pixelFormat){
	case OF_PIXELS_MONO:
		caps="video/x-raw-gray, depth=8, bpp=8";
		break;
	case OF_PIXELS_RGBA:
		caps="video/x-raw-rgb, depth=24, bpp=32, endianness=4321, red_mask=0xff0000, green_mask=0x00ff00, blue_mask=0x0000ff, alpha_mask=0x000000ff";
		break;
	case OF_PIXELS_BGRA:
		caps="video/x-raw-rgb, depth=24, bpp=32, endianness=4321, red_mask=0x0000ff, green_mask=0x00ff00, blue_mask=0xff0000, alpha_mask=0x000000ff";
		break;
	case OF_PIXELS_RGB:
	default:
		caps="video/x-raw-rgb, depth=24, bpp=24, endianness=4321, red_mask=0xff0000, green_mask=0x00ff00, blue_mask=0x0000ff, alpha_mask=0x000000ff";
		break;
	}
#else
	if(pixelFormat!=OF_PIXELS_NATIVE){
		caps="video/x-raw, format="+getGstFormatName(pixelFormat);
	}else{
		caps = "video/x-raw,format={RGBA,BGRA,RGB,BGR,RGB16,GRAY8,YV12,I420,NV12,NV21,YUY2}";
	}
#endif

	if(w!=-1 && h!=-1){
		caps+=", width=" + ofToString(w) + ", height=" + ofToString(h);
	}

	string pipeline_string =
		pipeline + " ! appsink name=ofappsink enable-last-sample=0 caps=\"" + caps + "\"";

	if((w==-1 || h==-1) || pixelFormat==OF_PIXELS_NATIVE || allocate(w,h,pixelFormat)){
		return setPipelineWithSink(pipeline_string,"ofappsink",isStream);
	}else{
		return false;
	}
#else
	string pipeline_string =
		pipeline + " ! glcolorscale name=gl_filter ! appsink name=ofappsink enable-last-sample=0 caps=\"video/x-raw,format=RGBA\"";

	bool ret;
	if((w==-1 || h==-1) || pixelFormat==OF_PIXELS_NATIVE || allocate(w,h,pixelFormat)){
		ret = setPipelineWithSink(pipeline_string,"ofappsink",isStream);
	}else{
		ret = false;
	}

	auto glfilter = gst_bin_get_by_name(GST_BIN(getPipeline()),"gl_filter");

#if defined(TARGET_LINUX) && !defined(TARGET_OPENGLES)
	glXMakeCurrent (ofGetX11Display(), None, 0);
	glDisplay = (GstGLDisplay *)gst_gl_display_x11_new_with_display(ofGetX11Display());
	glContext = gst_gl_context_new_wrapped (glDisplay, (guintptr) ofGetGLXContext(),
	    		  GST_GL_PLATFORM_GLX, GST_GL_API_OPENGL);

	g_object_set (G_OBJECT (glfilter), "other-context", glContext, NULL);
	// FIXME: this seems to be the way to add the context in 1.4.5
	//
	// GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));
	// gst_bus_enable_sync_message_emission (bus);
	// g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), this);
	// gst_object_unref(bus);

	glXMakeCurrent (ofGetX11Display(), ofGetX11Window(), ofGetGLXContext());
#elif defined(TARGET_OPENGLES)
	cout << "current display " << ofGetEGLDisplay() << endl;
	eglMakeCurrent (eglGetDisplay(EGL_DEFAULT_DISPLAY), 0,0, 0);
	glDisplay = (GstGLDisplay *)gst_gl_display_egl_new_with_egl_display(eglGetDisplay(EGL_DEFAULT_DISPLAY));
	glContext = gst_gl_context_new_wrapped (glDisplay, (guintptr) ofGetEGLContext(),
	    		  GST_GL_PLATFORM_EGL, GST_GL_API_GLES2);

	g_object_set (G_OBJECT (glfilter), "other-context", glContext, NULL);
	// FIXME: this seems to be the way to add the context in 1.4.5
	//
	// GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));
	// gst_bus_enable_sync_message_emission (bus);
	// g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), this);
	// gst_object_unref(bus);

	eglMakeCurrent (ofGetEGLDisplay(), ofGetEGLSurface(), ofGetEGLSurface(), ofGetEGLContext());

#endif

	return ret;
#endif
}
Example #8
static gboolean
do_switch (GstElement * pipeline)
{
  int other_channel;
  GstElement *select;
  GstElement *aselect;
  GstStateChangeReturn ret;
  gchar *name;
  gchar *othername;
  GstPad *pad;
  GstPad *apad;
  GstPad *otherPad;
  GstPad *aotherPad;
  gint64 v_stoptime, a_stoptime;
  gint64 v_starttime, a_starttime;
  gint64 v_runningtime, a_runningtime;
  gint64 starttime, stoptime;

  other_channel  = active_channel;
  active_channel = active_channel ? 0 : 1;

  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "foo");


  /* find the selector */
  select = gst_bin_get_by_name (GST_BIN (pipeline), "selector");
  aselect = gst_bin_get_by_name (GST_BIN (pipeline), "aselector");

  if (!select) {
     g_print("Input selector not found\n");
  }
  if (!aselect) {
     g_print("Audio input selector not found\n");
  }

  /* get the named pad */
  name = g_strdup_printf ("sink%d", active_channel);
  othername = g_strdup_printf ("sink%d", other_channel);
 
  pad = gst_element_get_static_pad (select, name);
  otherPad = gst_element_get_static_pad (select, othername);
  apad = gst_element_get_static_pad (aselect, name);
  aotherPad = gst_element_get_static_pad (aselect, othername);

  if (!pad) {
     g_print("Input selector pad %s not found\n", name);
  }
  if (!apad) {
     g_print("Audio Input selector pad %s not found\n", name);
  }
  if (!otherPad) {
     g_print("Input selector pad %s not found\n", othername);
  }
  if (!aotherPad) {
     g_print("Input selector pad %s not found\n", othername);
  }

  /* set the active pad */

  g_signal_emit_by_name (select, "block", &v_stoptime);
  g_signal_emit_by_name (aselect, "block", &a_stoptime);

  if (v_stoptime > a_stoptime) {
     stoptime = v_stoptime;
  } else {
     stoptime = a_stoptime;
  }

  g_object_get (G_OBJECT(pad), "running-time", &v_runningtime, NULL);
  g_object_get (G_OBJECT(apad), "running-time", &a_runningtime, NULL);
  
  if (v_runningtime < a_runningtime) {
     starttime = v_runningtime;
  } else {
     starttime = a_runningtime;
  } 

//  g_signal_emit_by_name (select, "switch", pad, stoptime, -1);
//  g_signal_emit_by_name (aselect, "switch", apad, stoptime, -1);

  g_signal_emit_by_name (select, "switch", pad, stoptime, starttime);
  g_signal_emit_by_name (aselect, "switch", apad, stoptime, starttime);

  g_free (name);
  g_free (othername);

  gst_object_unref (pad);
  gst_object_unref (otherPad);
  gst_object_unref (apad);
  gst_object_unref (aotherPad);
  gst_object_unref (select);
  gst_object_unref (aselect);

  return TRUE;
}
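Since do_switch() returns TRUE it can double as a GSourceFunc; a hypothetical driver that toggles between the two inputs once per second from the main loop:

/* Hypothetical hookup - switch channels every second. */
g_timeout_add_seconds (1, (GSourceFunc) do_switch, pipeline);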
Example #9
bool GStreamerGWorld::enterFullscreen()
{
    if (m_dynamicPadName)
        return false;

    if (!m_videoWindow)
        m_videoWindow = PlatformVideoWindow::createWindow();

    GstElement* platformVideoSink = gst_element_factory_make("autovideosink", "platformVideoSink");
    GstElement* colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    GstElement* queue = gst_element_factory_make("queue", "queue");
    GstElement* videoScale = gst_element_factory_make("videoscale", "videoScale");

    // Get video sink bin and the tee inside.
    GOwnPtr<GstElement> videoSink;
    g_object_get(m_pipeline, "video-sink", &videoSink.outPtr(), NULL);
    GstElement* tee = gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee");

    // Add and link a queue, ffmpegcolorspace and sink in the bin.
    gst_bin_add_many(GST_BIN(videoSink.get()), platformVideoSink, videoScale, colorspace, queue, NULL);
    gst_element_link_many(queue, colorspace, videoScale, platformVideoSink, NULL);

    // Link a new src pad from tee to queue.
    GstPad* srcPad = gst_element_get_request_pad(tee, "src%d");
    GstPad* sinkPad = gst_element_get_static_pad(queue, "sink");
    gst_pad_link(srcPad, sinkPad);
    gst_object_unref(GST_OBJECT(sinkPad));

    m_dynamicPadName = gst_pad_get_name(srcPad);

    // Roll new elements to pipeline state.
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(colorspace);
    gst_element_sync_state_with_parent(videoScale);
    gst_element_sync_state_with_parent(platformVideoSink);

    gst_object_unref(tee);

    // Query the current media segment information and send it towards
    // the new tee branch downstream.

    GstQuery* query = gst_query_new_segment(GST_FORMAT_TIME);
    gboolean queryResult = gst_element_query(m_pipeline, query);

#if GST_CHECK_VERSION(0, 10, 30)
    if (!queryResult) {
        gst_query_unref(query);
        gst_object_unref(GST_OBJECT(srcPad));
        return true;
    }
#else
    // GStreamer < 0.10.30 doesn't set the query result correctly, so
    // just ignore it to avoid a compilation warning.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=620490.
    (void) queryResult;
#endif

    GstFormat format;
    gint64 position;
    if (!gst_element_query_position(m_pipeline, &format, &position))
        position = 0;

    gdouble rate;
    gint64 startValue, stopValue;
    gst_query_parse_segment(query, &rate, &format, &startValue, &stopValue);

    GstEvent* event = gst_event_new_new_segment(FALSE, rate, format, startValue, stopValue, position);
    gst_pad_push_event(srcPad, event);

    gst_query_unref(query);
    gst_object_unref(GST_OBJECT(srcPad));
    return true;
}
Example #10
GstElement *
purple_media_manager_get_element(PurpleMediaManager *manager,
		PurpleMediaSessionType type, PurpleMedia *media,
		const gchar *session_id, const gchar *participant)
{
#ifdef USE_VV
	GstElement *ret = NULL;
	PurpleMediaElementInfo *info = NULL;
	PurpleMediaElementType element_type;

	if (type & PURPLE_MEDIA_SEND_AUDIO)
		info = manager->priv->audio_src;
	else if (type & PURPLE_MEDIA_RECV_AUDIO)
		info = manager->priv->audio_sink;
	else if (type & PURPLE_MEDIA_SEND_VIDEO)
		info = manager->priv->video_src;
	else if (type & PURPLE_MEDIA_RECV_VIDEO)
		info = manager->priv->video_sink;

	if (info == NULL)
		return NULL;

	element_type = purple_media_element_info_get_element_type(info);

	if (element_type & PURPLE_MEDIA_ELEMENT_UNIQUE &&
			element_type & PURPLE_MEDIA_ELEMENT_SRC) {
		GstElement *tee;
		GstPad *pad;
		GstPad *ghost;
		gchar *id = purple_media_element_info_get_id(info);

		ret = gst_bin_get_by_name(GST_BIN(
				purple_media_manager_get_pipeline(
				manager)), id);

		if (ret == NULL) {
			GstElement *bin, *fakesink;
			ret = purple_media_element_info_call_create(info,
					media, session_id, participant);
			bin = gst_bin_new(id);
			tee = gst_element_factory_make("tee", "tee");
			gst_bin_add_many(GST_BIN(bin), ret, tee, NULL);
			gst_element_link(ret, tee);

			/*
			 * This shouldn't be necessary, but it stops it from
			 * giving a not-linked error upon destruction
			 */
			fakesink = gst_element_factory_make("fakesink", NULL);
			g_object_set(fakesink, "sync", FALSE, NULL);
			gst_bin_add(GST_BIN(bin), fakesink);
			gst_element_link(tee, fakesink);

			ret = bin;
			gst_object_ref(ret);
			gst_bin_add(GST_BIN(purple_media_manager_get_pipeline(
					manager)), ret);
		}
		g_free(id);

		tee = gst_bin_get_by_name(GST_BIN(ret), "tee");
		pad = gst_element_get_request_pad(tee, "src%d");
		gst_object_unref(tee);
		ghost = gst_ghost_pad_new(NULL, pad);
		gst_object_unref(pad);
		g_signal_connect(GST_PAD(ghost), "unlinked",
				G_CALLBACK(request_pad_unlinked_cb), NULL);
		gst_pad_set_active(ghost, TRUE);
		gst_element_add_pad(ret, ghost);
	} else {
		ret = purple_media_element_info_call_create(info,
				media, session_id, participant);
	}

	if (ret == NULL)
		purple_debug_error("media", "Error creating source or sink\n");

	return ret;
#else
	return NULL;
#endif
}
Example #11
static void link_new_pad ( GstElement *source, GstPad *pad, char *queue_name, 
                           char *depayloader_name, char *decoder_name,
                           char *overlay_name,
                           char *selector_name, int channel, char media_type)
{
  GstElement *selector, *queue, *depayloader, *decoder, *overlay;
  gchar *name;
  gchar *media;
  GstPad *sinkpad;
  GstPad *srcpad, *tosrc;
  GstPad *decsrc, *qsink;
  int returnCode;


  if (media_type == 'v') {
     media = g_strdup_printf ("video");
  } else {
     media = g_strdup_printf ("audio");
  }

  selector = gst_bin_get_by_name (GST_BIN (pipeline), selector_name);
  if (!selector) {
     g_print("Unable to get selector element %s.\n", selector_name);
  }

  queue = gst_bin_get_by_name (GST_BIN (pipeline), queue_name);
  if (!queue) {
     g_print("Unable to get selector queue %s.\n", queue_name);
  }

  depayloader = gst_bin_get_by_name (GST_BIN (pipeline), depayloader_name);
  if (!depayloader) {
     g_print("Unable to get depayloader%s.\n", depayloader_name);
  }

  decoder = gst_bin_get_by_name (GST_BIN (pipeline), decoder_name);
  if (!decoder) {
     g_print("Unable to get decoder%s.\n", decoder_name);
  }
  
  overlay = gst_bin_get_by_name (GST_BIN (pipeline), overlay_name);
  // Don't check for error, overlay not present on Audio channels, so this can fail

  /* get all the pads */
  name = g_strdup_printf ("sink%d", channel);
  sinkpad = gst_element_get_request_pad (selector, name);
  if (!sinkpad) {
     g_print("Unable to create pad %s on %s selector.\n", name, media);
  }
  qsink = gst_element_get_static_pad (queue, "sink");
  if (!qsink) {
     g_print("Unable to get sink pad on %s queue %d.\n", media, channel);
  }

  /* Now, link it all up */

  if ((returnCode = gst_pad_link (pad, qsink)) != GST_PAD_LINK_OK) {
     g_print("Link of %s source%d pad to queue failed with Code %d\n", media, channel, returnCode);
  }

  if (gst_element_link_many (queue, depayloader, decoder, NULL) != TRUE) {
     g_print("Unable to link %s %s and %s.\n", queue_name, depayloader_name, decoder_name);
  }

  if (overlay) {
     if (gst_element_link (decoder, overlay) != TRUE) {
        g_print("Unable to link %s and %s.\n", decoder_name, overlay_name);
     }
     tosrc  = gst_element_get_static_pad (overlay, "src");
     if (!tosrc) {
        g_print("Unable to get %s overlay src pad on channel %d.\n", media, channel);
     }
     if ((returnCode = gst_pad_link (tosrc, sinkpad)) != GST_PAD_LINK_OK) {
        g_print("Link of %s timeoverlay src %d pad to selector failed with Code %d\n", media, channel, returnCode);
     }
     
  } else {
    decsrc  = gst_element_get_static_pad (decoder, "src");
    if (!decsrc) {
       g_print("Unable to get %s decoder src pad on channel %d.\n", media, channel);
    }
    if ((returnCode = gst_pad_link (decsrc, sinkpad)) != GST_PAD_LINK_OK) {
       g_print("Link of %s decoder src %d pad to selector failed with Code %d\n", media, channel, returnCode);
    }
  }


//  g_print("Linked %s source %d to %s %s and %s selector pad %s.\n", 
//           media, channel, depayloader_name, decoder_name, queue_name, name);

  gst_element_sync_state_with_parent(source);

  g_free (name);
  g_free (media);
}
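link_new_pad() is presumably invoked from a "pad-added" handler on the dynamic source element (e.g. an rtspsrc or decodebin); a hypothetical hookup for a video channel, with all element names as placeholders:

static void
on_pad_added (GstElement *source, GstPad *pad, gpointer user_data)
{
  /* channel 0 video branch; names must match the pipeline description */
  link_new_pad (source, pad, "vqueue0", "vdepay0", "vdec0",
                "voverlay0", "selector", 0, 'v');
}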
Example #12
int PsychCreateNewMovieFile(char* moviefile, int width, int height, double framerate, char* movieoptions)
{
	PsychMovieWriterRecordType*             pwriterRec = NULL;
	int                                     moviehandle = 0;
	GError                                  *myErr = NULL;
	char*                                   poption;
	char                                    codecString[1000];
	char                                    launchString[10000];
	int                                     dummyInt;
	float                                   dummyFloat;
	char                                    myfourcc[5];
	psych_bool                              doAudio = FALSE;

	// Still capacity left?
	if (moviewritercount >= PSYCH_MAX_MOVIEWRITERDEVICES) PsychErrorExitMsg(PsychError_user, "Maximum number of movie writers exceeded. Please close some first!");

	// Find first free (i.e., NULL) slot and assign moviehandle:
	while ((pwriterRec = PsychGetMovieWriter(moviehandle, TRUE)) && pwriterRec->Movie) moviehandle++;

	if (firsttime) {
		// Make sure GStreamer is ready:
		PsychGSCheckInit("movie writing");
		firsttime = FALSE;
	}

	// Store movie filename:
	strcpy(pwriterRec->File, moviefile);

	// Store width, height:
	pwriterRec->height  = height;
	pwriterRec->width   = width;
	pwriterRec->eos     = FALSE;

	// If no movieoptions specified, create default string for default
	// codec selection and configuration:
	if (strlen(movieoptions) == 0) {
		// No options provided. Select default encoder with default settings:
		movieoptions = strdup("DEFAULTenc");
	} else if ((poption = strstr(movieoptions, ":CodecSettings="))) {
		// Replace ':' with a zero in movieoptions, so it gets null-terminated:
		movieoptions = poption;
		*movieoptions = 0;

		// Move after null-terminator:
		movieoptions++;

		// Replace the ':CodecSettings=' with the special keyword 'DEFAULTenc', so
		// so the default video codec is chosen, but the given settings override its
		// default parameters.
		strncpy(movieoptions, "DEFAULTenc    ", strlen("DEFAULTenc    "));

		if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecSettings= parameter specified. Aborted.");
	} else if ((poption = strstr(movieoptions, ":CodecType="))) {
		// Replace ':' with a zero in movieoptions, so it gets null-terminated
		// and only points to the actual movie filename:
		movieoptions = poption;
		*movieoptions = 0;

		// Advance movieoptions to point to the actual codec spec string:
		movieoptions+= 11;

		if (strlen(movieoptions) == 0) PsychErrorExitMsg(PsychError_user, "Invalid (empty) :CodecType= parameter specified. Aborted.");
	}

	// Assign numeric 32-bit FOURCC equivalent code to select codec:
	// This is optional. We default to kH264CodecType:
	if ((poption = strstr(movieoptions, "CodecFOURCCId="))) {
		if (sscanf(poption, "CodecFOURCCId=%i", &dummyInt) == 1) {
			pwriterRec->CodecType = dummyInt;
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
			if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");            
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCCId= parameter provided in movieoptions parameter. Parse error!");
	}

	// Assign 4 character string FOURCC code to select codec:
	if ((poption = strstr(movieoptions, "CodecFOURCC="))) {
		if (sscanf(poption, "CodecFOURCC=%c%c%c%c", &myfourcc[0], &myfourcc[1], &myfourcc[2], &myfourcc[3]) == 4) {
			myfourcc[4] = 0;
			dummyInt = (int) GST_STR_FOURCC (myfourcc);
			pwriterRec->CodecType = dummyInt;
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Codec with FOURCC numeric id %i [%" GST_FOURCC_FORMAT "] requested for encoding of movie %i [%s].\n", dummyInt, GST_FOURCC_ARGS(dummyInt), moviehandle, moviefile);
			if (PsychPrefStateGet_Verbosity() > 1) printf("PTB-WARNING: Codec selection by FOURCC not yet supported. FOURCC code ignored!\n");            
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid CodecFOURCC= parameter provided in movieoptions parameter. Must be exactly 4 characters! Parse error!");
	}

	// Assign numeric encoding quality level:
	// This is optional. We default to "normal quality":
	if ((poption = strstr(movieoptions, "EncodingQuality="))) {
		if ((sscanf(poption, "EncodingQuality=%f", &dummyFloat) == 1) && (dummyFloat >= 0) && (dummyFloat <= 1)) {
			// Map floating point quality level between 0.0 and 1.0 to 10 discrete levels:
			if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Encoding quality level %f selected for encoding of movie %i [%s].\n", dummyFloat, moviehandle, moviefile);

			// Rewrite "EncodingQuality=" string into "VideoQuality=" string, with proper
			// padding:      "EncodingQuality="
			// This way EncodingQuality in Quicktime lingo corresponds to
			// VideoQuality in GStreamer lingo:
			strncpy(poption, "   Videoquality=", strlen("   Videoquality="));
		}
		else PsychErrorExitMsg(PsychError_user, "Invalid EncodingQuality= parameter provided in movieoptions parameter. Parse error or out of valid 0 - 1 range!");
	}

	// Check for valid parameters. Also warn if some parameters are borderline for certain codecs:
	if ((framerate < 1) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: Negative or zero 'framerate' %f units for moviehandle %i provided! Sounds like trouble ahead.\n", (float) framerate, moviehandle);
	if (width < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'width' for video frame size provided!");
	if ((width < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small width.\n", width, moviehandle);
	if ((width % 4 != 0) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'width' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with a width which is not a multiple of 4 or 16.\n", width, moviehandle);
	if (height < 1) PsychErrorExitMsg(PsychError_user, "In CreateMovie: Invalid zero or negative 'height' for video frame size provided!");
	if ((height < 4) && (PsychPrefStateGet_Verbosity() > 1)) printf("PTB-WARNING:In CreateMovie: 'height' of %i pixels for moviehandle %i provided! Some video codecs may malfunction with such a small height.\n", height, moviehandle);

	// Full GStreamer launch line a la gst-launch command provided?
	if (strstr(movieoptions, "gst-launch")) {
		// Yes: We use movieoptions directly as launch line:
		movieoptions = strstr(movieoptions, "gst-launch");
        
		// Move string pointer behind the "gst-launch" word (plus a blank):
		movieoptions+= strlen("gst-launch ");
        
		// Can directly use this:
		sprintf(launchString, "%s", movieoptions);

		// With audio track?
		if (strstr(movieoptions, "name=ptbaudioappsrc")) doAudio = TRUE;
	}
	else {
		// No: Do our own parsing and setup:

		// Find the gst-launch style string for codecs and muxers:
		if (!PsychGetCodecLaunchLineFromString(movieoptions, &(codecString[0]))) {
			// No config for this format possible:
			if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR:In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find matching codec setup.\n", moviehandle, moviefile);
			goto bail;
		}
        
		// With audio track?
		if (strstr(movieoptions, "AddAudioTrack")) doAudio = TRUE;
        
		// Build final launch string:
		if (doAudio) {
			// Video and audio:
			sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
		} else {
			// Video only:
			sprintf(launchString, "appsrc name=ptbvideoappsrc do-timestamp=0 stream-type=0 max-bytes=0 block=1 is-live=0 emit-signals=0 ! capsfilter caps=\"video/x-raw-rgb, bpp=(int)32, depth=(int)32, endianess=(int)4321, red_mask=(int)16711680, green_mask=(int)65280, blue_mask=(int)255, width=(int)%i, height=(int)%i, framerate=%i/1 \" ! videorate ! ffmpegcolorspace ! %s ! filesink name=ptbfilesink async=0 location=%s ", width, height, ((int) (framerate + 0.5)), codecString, moviefile);
		}
	}
        
	// Create a movie file for the destination movie:
	if (PsychPrefStateGet_Verbosity() > 3) {
		printf("PTB-INFO: Movie writing pipeline gst-launch line (without the -e option required on the command line!) is:\n");
		printf("gst-launch %s\n", launchString);
	}

	// Build pipeline from launch string:
	pwriterRec->Movie = gst_parse_launch((const gchar*) launchString, &myErr);
	if ((NULL == pwriterRec->Movie) || myErr) {
		if (PsychPrefStateGet_Verbosity() > 0) {
			printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not build pipeline.\n", moviehandle, moviefile);
			printf("PTB-ERROR: Parameters were: %s\n", movieoptions);
			printf("PTB-ERROR: Launch string was: %s\n", launchString);
			printf("PTB-ERROR: GStreamer error message was: %s\n", (char*) myErr->message);

		      // Special tips for the challenged:
		      if (strstr(myErr->message, "property")) {
			      // Bailed due to unsupported x264enc parameter "speed-preset" or "profile". Can be solved by upgrading
			      // GStreamer or the OS or the VideoCodec= override:
			      printf("PTB-TIP: The reason this failed is because your GStreamer codec installation is too outdated.\n");
			      printf("PTB-TIP: Either upgrade your GStreamer (plugin) installation to a more recent version,\n");
			      printf("PTB-TIP: or upgrade your operating system (e.g., Ubuntu 10.10 'Maverick Meercat' and later are fine).\n");
			      printf("PTB-TIP: A recent GStreamer installation is required to use all features and get optimal performance.\n");
			      printf("PTB-TIP: As a workaround, you can manually specify all codec settings, leaving out the unsupported\n");
			      printf("PTB-TIP: option. See 'help VideoRecording' on how to do that.\n\n");
		      }
		}

		goto bail;
	}

	// Get handle to ptbvideoappsrc:
	pwriterRec->ptbvideoappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbvideoappsrc");
	if (NULL == pwriterRec->ptbvideoappsrc) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbvideoappsrc pipeline element.\n", moviehandle, moviefile);
		goto bail;
	}

	// Get handle to ptbaudioappsrc:
	pwriterRec->ptbaudioappsrc = gst_bin_get_by_name(GST_BIN(pwriterRec->Movie), (const gchar *) "ptbaudioappsrc");
	if (doAudio && (NULL == pwriterRec->ptbaudioappsrc)) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed: Could not find ptbaudioappsrc pipeline element.\n", moviehandle, moviefile);
		goto bail;
	}

	pwriterRec->Context = g_main_loop_new (NULL, FALSE);
	pwriterRec->bus = gst_pipeline_get_bus (GST_PIPELINE(pwriterRec->Movie));
	gst_bus_add_watch(pwriterRec->bus, (GstBusFunc) PsychMovieBusCallback, pwriterRec);
	gst_object_unref(pwriterRec->bus);

	// Start the pipeline:
	if (!PsychMoviePipelineSetState(pwriterRec->Movie, GST_STATE_PLAYING, 10)) {
		if (PsychPrefStateGet_Verbosity() > 0) printf("PTB-ERROR: In CreateMovie: Creating movie file with handle %i [%s] failed:  Failed to start movie encoding pipeline!\n", moviehandle, moviefile);
		goto bail;
	}

	PsychGSProcessMovieContext(pwriterRec->Context, FALSE);

	// Increment count of open movie writers:
	moviewritercount++;
	
	if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-INFO: Moviehandle %i successfully opened for movie writing into file '%s'.\n", moviehandle, moviefile);

    // Should we dump the whole encoding pipeline graph to a file for visualization
    // with GraphViz? This can be controlled via PsychTweak('GStreamerDumpFilterGraph' dirname);
    if (getenv("GST_DEBUG_DUMP_DOT_DIR")) {
        // Dump complete encoding filter graph to a .dot file for later visualization with GraphViz:
        printf("PTB-DEBUG: Dumping movie encoder graph for movie %s to directory %s.\n", moviefile, getenv("GST_DEBUG_DUMP_DOT_DIR"));
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pwriterRec->Movie), GST_DEBUG_GRAPH_SHOW_ALL, "PsychMovieWritingGraph");
    }

	// Return new handle:
	return(moviehandle);

bail:
	if (pwriterRec->ptbvideoappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbvideoappsrc));
	pwriterRec->ptbvideoappsrc = NULL;

	if (pwriterRec->ptbaudioappsrc) gst_object_unref(GST_OBJECT(pwriterRec->ptbaudioappsrc));
	pwriterRec->ptbaudioappsrc = NULL;

	if (pwriterRec->Movie) gst_object_unref(GST_OBJECT(pwriterRec->Movie));
	pwriterRec->Movie = NULL;

	if (pwriterRec->Context) g_main_loop_unref(pwriterRec->Context);
	pwriterRec->Context = NULL;

	// Return failure:
	return(-1);
}
Example #13
// init
bool gstCamera::init( gstCameraSrc src )
{
	GError* err = NULL;
	printf(LOG_GSTREAMER "gstCamera attempting to initialize with %s\n", gstCameraSrcToString(src));

	// build pipeline string
	if( !buildLaunchStr(src) )
	{
		printf(LOG_GSTREAMER "gstCamera failed to build pipeline string\n");
		return false;
	}

	// launch pipeline
	mPipeline = gst_parse_launch(mLaunchStr.c_str(), &err);

	if( err != NULL )
	{
		printf(LOG_GSTREAMER "gstCamera failed to create pipeline\n");
		printf(LOG_GSTREAMER "   (%s)\n", err->message);
		g_error_free(err);
		return false;
	}

	GstPipeline* pipeline = GST_PIPELINE(mPipeline);

	if( !pipeline )
	{
		printf(LOG_GSTREAMER "gstreamer failed to cast GstElement into GstPipeline\n");
		return false;
	}	

	// retrieve pipeline bus
	/*GstBus**/ mBus = gst_pipeline_get_bus(pipeline);

	if( !mBus )
	{
		printf(LOG_GSTREAMER "gstreamer failed to retrieve GstBus from pipeline\n");
		return false;
	}

	// add watch for messages (disabled when we poll the bus ourselves, instead of gmainloop)
	//gst_bus_add_watch(mBus, (GstBusFunc)gst_message_print, NULL);

	// get the appsink
	GstElement* appsinkElement = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");
	GstAppSink* appsink = GST_APP_SINK(appsinkElement);

	if( !appsinkElement || !appsink)
	{
		printf(LOG_GSTREAMER "gstreamer failed to retrieve AppSink element from pipeline\n");
		return false;
	}
	
	mAppSink = appsink;
	
	// setup callbacks
	GstAppSinkCallbacks cb;
	memset(&cb, 0, sizeof(GstAppSinkCallbacks));
	
	cb.eos         = onEOS;
	cb.new_preroll = onPreroll;
	cb.new_sample  = onBuffer;
	
	gst_app_sink_set_callbacks(mAppSink, &cb, (void*)this, NULL);
	
	return true;
}
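The callbacks registered through GstAppSinkCallbacks follow the GStreamer 1.0 appsink signatures. The real onEOS/onPreroll/onBuffer are gstCamera methods and are not shown here; hypothetical minimal shapes would be:

static void onEOS(GstAppSink *sink, void *user_data)
{
	/* stream ended */
}

static GstFlowReturn onPreroll(GstAppSink *sink, void *user_data)
{
	GstSample *sample = gst_app_sink_pull_preroll(sink);
	if (sample)
		gst_sample_unref(sample);
	return GST_FLOW_OK;
}

static GstFlowReturn onBuffer(GstAppSink *sink, void *user_data)
{
	GstSample *sample = gst_app_sink_pull_sample(sink);
	if (sample)
		gst_sample_unref(sample);   /* real code maps/copies the buffer */
	return GST_FLOW_OK;
}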
Example #14
gint
main (gint argc, gchar ** argv)
{
  GstElement *pipeline;
  GstElement *shapewipe;
  GstController *ctrl;
  GstLFOControlSource *csource;
  GValue val = { 0, };
  GMainLoop *loop;
  GstBus *bus;
  gchar *pipeline_string;

  if (argc != 2) {
    g_print ("Usage: shapewipe mask.png\n");
    return -1;
  }

  gst_init (&argc, &argv);
  gst_controller_init (&argc, &argv);

  pipeline_string =
      g_strdup_printf
      ("videotestsrc ! video/x-raw-yuv,width=640,height=480 ! shapewipe name=shape border=0.05 ! videomixer name=mixer ! ffmpegcolorspace ! autovideosink     filesrc location=%s ! typefind ! decodebin2 ! ffmpegcolorspace ! videoscale ! queue ! shape.mask_sink    videotestsrc pattern=snow ! video/x-raw-yuv,width=640,height=480 ! queue ! mixer.",
      argv[1]);

  pipeline = gst_parse_launch (pipeline_string, NULL);
  g_free (pipeline_string);

  if (pipeline == NULL) {
    g_print ("Failed to create pipeline\n");
    return -2;
  }

  shapewipe = gst_bin_get_by_name (GST_BIN (pipeline), "shape");

  if (!(ctrl = gst_controller_new (G_OBJECT (shapewipe), "position", NULL))) {
    g_print ("can't control shapewipe element\n");
    return -3;
  }

  csource = gst_lfo_control_source_new ();

  gst_controller_set_control_source (ctrl, "position",
      GST_CONTROL_SOURCE (csource));

  g_value_init (&val, G_TYPE_FLOAT);
  g_value_set_float (&val, 0.5);
  g_object_set (G_OBJECT (csource), "amplitude", &val, NULL);
  g_value_set_float (&val, 0.5);
  g_object_set (G_OBJECT (csource), "offset", &val, NULL);
  g_value_unset (&val);

  g_object_set (G_OBJECT (csource), "frequency", 0.5, NULL);
  g_object_set (G_OBJECT (csource), "timeshift", 500 * GST_MSECOND, NULL);

  g_object_unref (csource);

  loop = g_main_loop_new (NULL, FALSE);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (on_message), loop);
  gst_object_unref (GST_OBJECT (bus));

  if (gst_element_set_state (pipeline,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
    return -4;
  }

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);

  g_object_unref (G_OBJECT (ctrl));
  gst_object_unref (G_OBJECT (pipeline));

  return 0;
}
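The on_message bus handler is not included in the snippet; a minimal sketch that quits the main loop on error or end-of-stream (body assumed):

static void
on_message (GstBus *bus, GstMessage *message, gpointer user_data)
{
  GMainLoop *loop = user_data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
    case GST_MESSAGE_EOS:
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
}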
Example #15
bool ofGstUtils::initGrabber(int w, int h) {
    bpp = 3;
    if(!camData.bInited) get_video_devices(camData);

    ofGstVideoFormat * format = selectFormat(w,h);
    if(! format) {
        return false;
    }

    /*	ofLog(OF_LOG_VERBOSE,"ofGstUtils: selected format: " + ofToString(format->width) + "x" + ofToString(format->height) + " " + format->mimetype + " framerate: " + ofToString(format->highest_framerate.numerator) + "/" + ofToString(format->highest_framerate.denominator));*/


    camData.webcam_devices[deviceID].current_format = format;
    bIsCamera = true;
    bHavePixelsChanged 	= false;

    width = w;
    height = h;

    gstData.loop		= g_main_loop_new (NULL, FALSE);


    const char * decodebin = "";
    if(strcmp(format->mimetype,"video/x-raw-yuv")!=0 && strcmp(format->mimetype,"video/x-raw-rgb")!=0)
        decodebin = "decodebin !";

    const char * scale = "";
    if( strcmp(format->mimetype,"video/x-raw-rgb")!=0 ) scale = "ffmpegcolorspace !";
    if( w!=format->width || h!=format->height )	scale = "ffvideoscale method=2 !";

    string format_str_pipeline = string("%s name=video_source device=%s ! ") +
                                 "%s,width=%d,height=%d,framerate=%d/%d ! " +
                                 "%s %s " +
                                 "video/x-raw-rgb, width=%d, height=%d, depth=24 ! appsink name=sink  caps=video/x-raw-rgb";
    gchar* pipeline_string =g_strdup_printf (
                                format_str_pipeline.c_str(),
                                camData.webcam_devices[deviceID].gstreamer_src,
                                camData.webcam_devices[deviceID].video_device,
                                format->mimetype,
                                format->width,
                                format->height,
                                format->highest_framerate.numerator,
                                format->highest_framerate.denominator,
                                decodebin, scale,
                                w,h);

//	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);

    GError * error = NULL;
    gstPipeline = gst_parse_launch (pipeline_string, &error);
    g_free (pipeline_string);
    if(error!=NULL){
        ofLog(OF_LOG_ERROR, "ofGstUtils: couldn't create pipeline: " + string(error->message));
        g_error_free (error);
        return false;
    }

    gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");

    gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);


    if(startPipeline()) {
        play();
        return true;
    } else {
        return false;
    }
}
Example #16
bool VideoSender::enableSending(bool enable)
{
  GstElement *sink;
#define USE_TEE 0
#if USE_TEE
  GstElement *ob;
#endif
  GError *error = NULL;

  qDebug() << "In" << __FUNCTION__ << ", Enable:" << enable;

  // Disable video sending
  if (enable == false) {
    qDebug() << "Stopping video encoding";
    if (pipeline) {
      gst_element_set_state(pipeline, GST_STATE_NULL);
    }

    qDebug() << "Deleting pipeline";
    if (pipeline) {
      gst_object_unref(GST_OBJECT(pipeline));
      pipeline = NULL;
    }
    encoder = NULL;

    ODdata[OB_VIDEO_PARAM_CONTINUE] = 0;
    if (ODprocess) {
      ODprocess->write((const char *)ODdata, sizeof(ODdata));
    }

    return true;
  }

  if (pipeline) {
    // Do nothing as the pipeline has already been created and is
    // probably running
    qCritical("Pipeline exists already, doing nothing");
    return true;
  }

  // Initialisation. We don't pass command line arguments here
  if (!gst_init_check(NULL, NULL, NULL)) {
    qCritical("Failed to init GST");
    return false;
  }

  if (!hardware) {
    qCritical("No hardware plugin");
    return false;
  }

  QString pipelineString = "";
  pipelineString.append(videoSource + " name=source");
  pipelineString.append(" ! ");
  pipelineString.append("capsfilter caps=\"video/x-raw,format=(string)I420,framerate=(fraction)30/1,");
  switch(quality) {
  default:
  case 0:
    pipelineString.append("width=(int)320,height=(int)240");
    break;
  case 1:
    pipelineString.append("width=(int)640,height=(int)480");
    break;
  case 2:
    pipelineString.append("width=(int)800,height=(int)600");
    break;
  }

  pipelineString.append("\"");

#if USE_TEE
  pipelineString.append(" ! ");
  pipelineString.append("tee name=scripttee");
  // FIXME: does this cause latency?
  pipelineString.append(" ! ");
  pipelineString.append("queue");
#endif
  pipelineString.append(" ! ");
  pipelineString.append(hardware->getEncodingPipeline());
  pipelineString.append(" ! ");
  pipelineString.append("rtph264pay name=rtppay config-interval=1 mtu=500");
  pipelineString.append(" ! ");
  pipelineString.append("appsink name=sink sync=false max-buffers=1 drop=true");
#if USE_TEE
  // Tee (branch) frames for external components
  pipelineString.append(" scripttee. ");
  // TODO: downscale to 320x240?
  pipelineString.append(" ! ");
  pipelineString.append("appsink name=ob sync=false max-buffers=1 drop=true");
#endif
  qDebug() << "Using pipeline:" << pipelineString;

  // Create encoding video pipeline
  pipeline = gst_parse_launch(pipelineString.toUtf8(), &error);
  if (!pipeline) {
    qCritical("Failed to parse pipeline: %s", error->message);
    g_error_free(error);
    return false;
  }

  encoder = gst_bin_get_by_name(GST_BIN(pipeline), "encoder");
  if (!encoder) {
    qCritical("Failed to get encoder");
    return false;
  }

  // Assuming here that X86 uses x264enc
  if (hardware->getHardwareName() == "generic_x86") {
    g_object_set(G_OBJECT(encoder), "speed-preset", 1, NULL); // ultrafast
    g_object_set(G_OBJECT(encoder), "tune", 0x00000004, NULL); // zerolatency
  }

  if (hardware->getHardwareName() == "tegrak1" ||
      hardware->getHardwareName() == "tegrax1") {
    //g_object_set(G_OBJECT(encoder), "input-buffers", 2, NULL); // not valid on 1.0
    //g_object_set(G_OBJECT(encoder), "output-buffers", 2, NULL); // not valid on 1.0
    //g_object_set(G_OBJECT(encoder), "quality-level", 0, NULL);
    //g_object_set(G_OBJECT(encoder), "rc-mode", 0, NULL);
  }

  if (hardware->getHardwareName() == "tegrax2") {
    g_object_set(G_OBJECT(encoder), "preset-level", 0, NULL); // 0 == UltraFastPreset for high perf
  }

  setBitrate(bitrate);

  {
    GstElement *source;
    source = gst_bin_get_by_name(GST_BIN(pipeline), "source");
    if (!source) {
      qCritical("Failed to get source");
      return false;
    }

    g_object_set(G_OBJECT(source), "do-timestamp", true, NULL);

    if (videoSource == "videotestsrc") {
      g_object_set(G_OBJECT(source), "is-live", true, NULL);
    } else if (videoSource == "v4l2src") {
      //g_object_set(G_OBJECT(source), "always-copy", false, NULL);

      const char *camera = "/dev/video0";
      QByteArray env_camera = qgetenv("PLECO_SLAVE_CAMERA");
      if (!env_camera.isNull()) {
        camera = env_camera.data();
      }
      g_object_set(G_OBJECT(source), "device", camera, NULL);
    }

    if (hardware->getHardwareName() == "tegrak1" ||
        hardware->getHardwareName() == "tegrax1") {
      g_object_set(G_OBJECT(source), "io-mode", 1, NULL);
    }
  }


  sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
  if (!sink) {
    qCritical("Failed to get sink");
    return false;
  }

  // Set appsink callbacks (zero-initialised so the reserved fields are NULL)
  GstAppSinkCallbacks appSinkCallbacks = { 0 };
  appSinkCallbacks.eos             = NULL;
  appSinkCallbacks.new_preroll     = NULL;
  appSinkCallbacks.new_sample      = &newBufferCB;

  gst_app_sink_set_callbacks(GST_APP_SINK(sink), &appSinkCallbacks, this, NULL);
#if USE_TEE
  // Callbacks for the OB process appsink
  ob = gst_bin_get_by_name(GST_BIN(pipeline), "ob");
  if (!ob) {
    qCritical("Failed to get ob appsink");
    return false;
  }

  // Set appsink callbacks (zero-initialised so the reserved fields are NULL)
  GstAppSinkCallbacks obCallbacks = { 0 };
  obCallbacks.eos             = NULL;
  obCallbacks.new_preroll     = NULL;
  obCallbacks.new_sample      = &newBufferOBCB;

  gst_app_sink_set_callbacks(GST_APP_SINK(ob), &obCallbacks, this, NULL);
#endif
  // Start running 
  gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

  launchObjectDetection();

  return true;
}
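The appsink callbacks registered above (newBufferCB, newBufferOBCB) are not shown in this example. A minimal sketch of what such a callback might look like, assuming the standard GStreamer 1.x appsink API (the body is illustrative, not the project's actual implementation):

static GstFlowReturn newBufferCB(GstAppSink *sink, gpointer user_data)
{
  /* With max-buffers=1 and drop=true this pulls the newest sample */
  GstSample *sample = gst_app_sink_pull_sample(sink);
  if (!sample)
    return GST_FLOW_EOS; /* the sink is draining */

  GstBuffer *buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
    /* map.data/map.size hold the RTP/H.264 payload; hand it to the sender */
    gst_buffer_unmap(buffer, &map);
  }

  gst_sample_unref(sample);
  return GST_FLOW_OK;
}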
Example #17
0
bool GStreamerGWorld::enterFullscreen()
{
    if (m_dynamicPadName)
        return false;

    if (!m_videoWindow)
        m_videoWindow = PlatformVideoWindow::createWindow();

    GstElement* platformVideoSink = gst_element_factory_make("autovideosink", "platformVideoSink");
    GstElement* colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
    GstElement* queue = gst_element_factory_make("queue", "queue");
    GstElement* videoScale = gst_element_factory_make("videoscale", "videoScale");

    // Get video sink bin and the tee inside.
    GRefPtr<GstElement> videoSink;
    GstElement* sinkPtr = 0;

    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));

    // Add and link a queue, ffmpegcolorspace, videoscale and sink in the bin.
    gst_bin_add_many(GST_BIN(videoSink.get()), platformVideoSink, videoScale, colorspace, queue, NULL);

    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", colorspace, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(colorspace, "src", videoScale, "sink", GST_PAD_LINK_CHECK_NOTHING);
    gst_element_link_pads_full(videoScale, "src", platformVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Link a new src pad from tee to queue.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_request_pad(tee.get(), "src%d"));
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue, "sink"));
    gst_pad_link(srcPad.get(), sinkPad.get());

    m_dynamicPadName.set(gst_pad_get_name(srcPad.get()));

    // Synchronize the new elements with pipeline state. If it's
    // paused limit the state change to pre-rolling.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING)
        state = GST_STATE_READY;

    gst_element_set_state(platformVideoSink, state);
    gst_element_set_state(videoScale, state);
    gst_element_set_state(colorspace, state);
    gst_element_set_state(queue, state);

    // Query the current media segment information and send it towards
    // the new tee branch downstream.

    GstQuery* query = gst_query_new_segment(GST_FORMAT_TIME);
    gboolean queryResult = gst_element_query(m_pipeline, query);

    if (!queryResult) {
        gst_query_unref(query);
        return true;
    }

    GstFormat format;
    gint64 position;
    if (!gst_element_query_position(m_pipeline, &format, &position))
        position = 0;

    gdouble rate;
    gint64 startValue, stopValue;
    gst_query_parse_segment(query, &rate, &format, &startValue, &stopValue);

    GstEvent* event = gst_event_new_new_segment(FALSE, rate, format, startValue, stopValue, position);
    gst_pad_push_event(srcPad.get(), event);

    gst_query_unref(query);
    return true;
}
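enterFullscreen() stores the name of the requested tee pad, but its teardown counterpart is not shown. A hedged sketch of how exitFullscreen() might release that branch, assuming the same member names as above (gst_element_get_pad and gst_element_release_request_pad are GStreamer 0.10 API; removal of the queue/colorspace/scale/sink elements is elided):

void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    GRefPtr<GstElement> videoSink = adoptGRef(sinkPtr);
    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));

    // Release the src pad requested in enterFullscreen(); the tee unlinks
    // and removes the pad as part of the release.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_pad(tee.get(), m_dynamicPadName.get()));
    gst_element_release_request_pad(tee.get(), srcPad.get());

    m_dynamicPadName.clear();
}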
Example #18
0
GstElement*			ly_ppl_video_get_element		(char *name)
{
	GstElement *ele=NULL;
	ele=gst_bin_get_by_name(GST_BIN(ly_ppl_video_bin), name);
	return ele;
}
bool GStreamerFramesReceiver::LoadVideo(char * URL)
{
	GstStateChangeReturn res;

	/* Initialize GStreamer */
	gst_init(NULL, NULL);

	/* Build the pipeline */
	GError *error = NULL;
	char * init_str = g_strdup_printf("rtspsrc location=%s latency=1000 drop-on-latency=false ! queue ! rtph264depay ! queue2 ! avdec_h264 ! queue2 ! appsink name=mysink", URL);
	pipeline = gst_parse_launch(init_str, &error);
	g_free(init_str);
	
	if (error)
	{
		gchar * message = g_strdup_printf("Unable to build pipeline: %s", error -> message);
		g_printerr("%s\n", message);
		g_clear_error(&error);
		g_free(message);
		return false;
	}

	sink = gst_bin_get_by_name(GST_BIN(pipeline), "mysink");

	/* Instruct appsink to drop old buffers when the maximum amount of queued buffers is reached. */
	gst_app_sink_set_drop(GST_APP_SINK(sink), true);

	/* Set the maximum number of buffers that can be queued in appsink.
	 * Once this many buffers are queued in appsink, any more buffers
	 * will block upstream elements until a sample is pulled from appsink.
	 */
	gst_app_sink_set_max_buffers(GST_APP_SINK(sink), 1);		// number of queued received buffers in appsink before updating a new frame
	g_object_set(G_OBJECT(sink), "sync", TRUE, NULL);			// GST_OBJECT

	// Registering callbacks to appsink element
	GstAppSinkCallbacks callbacks = { on_eos, new_preroll, new_buffer, NULL };
	gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, this, NULL);

	res = gst_element_set_state (pipeline, GST_STATE_PLAYING);

	if (res == GST_STATE_CHANGE_FAILURE)
	{
		g_printerr ("Unable to set the pipeline to the playing state.\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	else if (res == GST_STATE_CHANGE_NO_PREROLL)
	{
		g_print ("live sources not supported yet\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	else if (res == GST_STATE_CHANGE_ASYNC)
	{
		// can happen when buffering occurs
		GstState current, pending;
		res = gst_element_get_state(GST_ELEMENT(pipeline), &current, &pending, GST_CLOCK_TIME_NONE);
		if(res == GST_STATE_CHANGE_FAILURE || res == GST_STATE_CHANGE_ASYNC)
		{
			g_printerr ("Unable to set the pipeline to the playing state.\n");
			gst_object_unref (pipeline);
			pipeline = NULL;
			return false;
		}
	}

	bool isFrameOK = false;

	/* get the preroll buffer from appsink; this blocks until the appsink really prerolls */
	GstSample * sample;
	g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

	if (sample)
	{
		/* get the snapshot buffer format now. We set the caps on the appsink so
		 * that it can only be an rgb buffer. The only thing we have not specified
		 * on the caps is the height, which is dependent on the pixel-aspect-ratio
		 * of the source material
		 */
		GstCaps *caps = gst_sample_get_caps(sample);
		int width, height;
		PixelFormat pixelFormat;
		isFrameOK = ExtractImageParams(caps, width, height, pixelFormat);
		gst_sample_unref (sample);
	}

	if (!isFrameOK)
	{
		g_printerr ("Unable to get the snapshot buffer format.\n");
		gst_object_unref (pipeline);
		pipeline = NULL;
		return false;
	}
	
	mainLoopThread = g_thread_new("mainLoopThread", MainLoopThreadFunction, this);

	return true;
}
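ExtractImageParams() is referenced above but not defined. A sketch of what such a caps parser could look like, using standard GstStructure accessors (the mapping onto the project's PixelFormat type is an assumption, as is the MapGstFormatToPixelFormat helper):

static bool ExtractImageParams(GstCaps* caps, int& width, int& height, PixelFormat& pixelFormat)
{
	if (!caps)
		return false;

	const GstStructure* s = gst_caps_get_structure(caps, 0);
	if (!gst_structure_get_int(s, "width", &width) ||
	    !gst_structure_get_int(s, "height", &height))
		return false;

	/* For video/x-raw caps the pixel layout is carried in the "format"
	 * field; how it maps onto PixelFormat is project-specific. */
	const gchar* format = gst_structure_get_string(s, "format");
	pixelFormat = MapGstFormatToPixelFormat(format); /* hypothetical helper */
	return format != NULL;
}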
Example #20
0
/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! \
 * capsfilter ! tee name=t
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE, if elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  GstElement *filter_csp;
  GstElement *src_csp;
  GstElement *capsfilter;
  gboolean ret = FALSE;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;
  GstPad *src_caps_src_pad;

  if (!self->elements_created) {

    GST_DEBUG_OBJECT (self, "constructing pipeline");

    /* Add application set or default video src element */
    if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,
                self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC,
                "camerasrc-real-src"))) {
      self->src_vid_src = NULL;
      goto done;
    } else {
      if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {
        goto done;
      }
    }
    /* we lost the reference */
    self->app_vid_src = NULL;

    /* we listen for changes to max-zoom in the video src so that
     * we can proxy them to the basecamerasrc property */
    if (g_object_class_find_property (G_OBJECT_GET_CLASS (bcamsrc), "max-zoom")) {
      g_signal_connect (G_OBJECT (self->src_vid_src), "notify::max-zoom",
          (GCallback) gst_wrapper_camera_bin_src_max_zoom_cb, bcamsrc);
    }

    /* add a buffer probe to the src element to drop EOS from READY->NULL */
    {
      GstPad *pad;
      pad = gst_element_get_static_pad (self->src_vid_src, "src");

      self->src_event_probe_id = gst_pad_add_event_probe (pad,
          (GCallback) gst_wrapper_camera_src_src_event_probe, self);
      gst_object_unref (pad);
    }

    if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",
            "src-colorspace"))
      goto done;

    if (!(self->src_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "src-capsfilter")))
      goto done;

    /* attach to notify::caps on the first capsfilter and use a callback
     * to recalculate the zoom properties when these caps change and to
     * propagate the caps to the second capsfilter */
    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
    g_signal_connect (src_caps_src_pad, "notify::caps",
        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);
    gst_object_unref (src_caps_src_pad);

    if (!(self->src_zoom_crop =
            gst_camerabin_create_and_add_element (cbin, "videocrop",
                "zoom-crop")))
      goto done;
    if (!(self->src_zoom_scale =
            gst_camerabin_create_and_add_element (cbin, "videoscale",
                "zoom-scale")))
      goto done;
    if (!(self->src_zoom_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "zoom-capsfilter")))
      goto done;

    if (!(tee =
            gst_camerabin_create_and_add_element (cbin, "tee",
                "camerasrc-tee")))
      goto done;

    /* viewfinder pad */
    vf_pad = gst_element_get_request_pad (tee, "src%d");
    g_object_set (tee, "alloc-pad", vf_pad, NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
    gst_object_unref (vf_pad);

    /* image/video pad from tee */
    tee_capture_pad = gst_element_get_request_pad (tee, "src%d");

    self->output_selector =
        gst_element_factory_make ("output-selector", "outsel");
    g_object_set (self->output_selector, "pad-negotiation-mode", 0, NULL);
    gst_bin_add (GST_BIN (self), self->output_selector);
    {
      GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

      /* check return TODO */
      gst_pad_link (tee_capture_pad, pad);
      gst_object_unref (pad);
    }
    gst_object_unref (tee_capture_pad);

    /* Create the 2 output pads for video and image */
    self->outsel_vidpad =
        gst_element_get_request_pad (self->output_selector, "src%d");
    self->outsel_imgpad =
        gst_element_get_request_pad (self->output_selector, "src%d");

    g_assert (self->outsel_vidpad != NULL);
    g_assert (self->outsel_imgpad != NULL);

    gst_pad_add_buffer_probe (self->outsel_imgpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_imgsrc_probe), self);
    gst_pad_add_buffer_probe (self->outsel_vidpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_vidsrc_probe), self);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc),
        self->outsel_imgpad);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc),
        self->outsel_vidpad);

    if (bcamsrc->mode == MODE_IMAGE) {
      g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
          NULL);
    } else {
      g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
          NULL);
    }



    gst_pad_set_active (self->vfsrc, TRUE);
    gst_pad_set_active (self->imgsrc, TRUE);    /* XXX ??? */
    gst_pad_set_active (self->vidsrc, TRUE);    /* XXX ??? */
  }

  /* Do this even if pipeline is constructed */

  if (self->video_filter) {
    /* check if we need to replace the current one */
    if (self->video_filter != self->app_vid_filter) {
      gst_bin_remove (cbin, self->video_filter);
      gst_object_unref (self->video_filter);
      self->video_filter = NULL;
      filter_csp = gst_bin_get_by_name (cbin, "filter-colorspace");
      gst_bin_remove (cbin, filter_csp);
      gst_object_unref (filter_csp);
      filter_csp = NULL;
    }
  }

  if (!self->video_filter) {
    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);
      filter_csp = gst_element_factory_make ("ffmpegcolorspace",
          "filter-colorspace");
      gst_bin_add_many (cbin, self->video_filter, filter_csp, NULL);
      src_csp = gst_bin_get_by_name (cbin, "src-colorspace");
      capsfilter = gst_bin_get_by_name (cbin, "src-capsfilter");
      if (gst_pad_is_linked (gst_element_get_static_pad (src_csp, "src")))
        gst_element_unlink (src_csp, capsfilter);
      if (!gst_element_link_many (src_csp, self->video_filter, filter_csp,
              capsfilter, NULL))
        goto done;
    }
  }
  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}
Example #21
0
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
Example #22
0
static gboolean
gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
{
  GstElement *csp = NULL;
  GstElement *videoscale = NULL;
  GstPad *firstpad = NULL;
  const gchar *missing_element_name;
  gboolean newsink = FALSE;
  gboolean updated_converters = FALSE;

  GST_DEBUG_OBJECT (vfbin, "Creating internal elements");

  /* First check if we need to add/replace the internal sink */
  if (vfbin->video_sink) {
    if (vfbin->user_video_sink && vfbin->video_sink != vfbin->user_video_sink) {
      gst_bin_remove (GST_BIN_CAST (vfbin), vfbin->video_sink);
      gst_object_unref (vfbin->video_sink);
      vfbin->video_sink = NULL;
    }
  }

  if (!vfbin->video_sink) {
    if (vfbin->user_video_sink)
      vfbin->video_sink = gst_object_ref (vfbin->user_video_sink);
    else {
      vfbin->video_sink = gst_element_factory_make ("autovideosink",
          "vfbin-sink");
      if (!vfbin->video_sink) {
        missing_element_name = "autovideosink";
        goto missing_element;
      }
    }

    gst_bin_add (GST_BIN_CAST (vfbin), gst_object_ref (vfbin->video_sink));
    newsink = TRUE;
  }

  /* check if we want add/remove the conversion elements */
  if (vfbin->elements_created && vfbin->disable_converters) {
    /* remove the elements, user doesn't want them */

    gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
    csp = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-csp");
    videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-videoscale");

    gst_bin_remove (GST_BIN_CAST (vfbin), csp);
    gst_bin_remove (GST_BIN_CAST (vfbin), videoscale);

    gst_object_unref (csp);
    gst_object_unref (videoscale);

    updated_converters = TRUE;
  } else if (!vfbin->elements_created && !vfbin->disable_converters) {
    gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);

    /* add the elements, user wants them */
    csp = gst_element_factory_make ("videoconvert", "vfbin-csp");
    if (!csp) {
      missing_element_name = "videoconvert";
      goto missing_element;
    }
    gst_bin_add (GST_BIN_CAST (vfbin), csp);

    videoscale = gst_element_factory_make ("videoscale", "vfbin-videoscale");
    if (!videoscale) {
      missing_element_name = "videoscale";
      goto missing_element;
    }
    gst_bin_add (GST_BIN_CAST (vfbin), videoscale);

    gst_element_link_pads_full (csp, "src", videoscale, "sink",
        GST_PAD_LINK_CHECK_NOTHING);

    vfbin->elements_created = TRUE;
    GST_DEBUG_OBJECT (vfbin, "Elements successfully created and linked");

    updated_converters = TRUE;
  }
  /* otherwise, just leave it as is */

  /* if sink was replaced -> link it to the internal converters */
  if (newsink && !vfbin->disable_converters) {
    gboolean unref = FALSE;
    if (!videoscale) {
      videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin),
          "vfbin-videoscale");
      unref = TRUE;
    }

    if (!gst_element_link_pads_full (videoscale, "src", vfbin->video_sink,
            "sink", GST_PAD_LINK_CHECK_CAPS)) {
      GST_ELEMENT_ERROR (vfbin, CORE, NEGOTIATION, (NULL),
          ("linking videoscale and viewfindersink failed"));
    }

    if (unref)
      gst_object_unref (videoscale);
    videoscale = NULL;
  }

  /* Check if we need a new ghostpad target */
  if (updated_converters || (newsink && vfbin->disable_converters)) {
    if (vfbin->disable_converters) {
      firstpad = gst_element_get_static_pad (vfbin->video_sink, "sink");
    } else {
      /* csp should always exist at this point */
      firstpad = gst_element_get_static_pad (csp, "sink");
    }
  }

  /* need to change the ghostpad target if firstpad is set */
  if (firstpad) {
    if (!gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), firstpad))
      goto error;
    gst_object_unref (firstpad);
    firstpad = NULL;
  }

  return TRUE;

missing_element:
  gst_element_post_message (GST_ELEMENT_CAST (vfbin),
      gst_missing_element_message_new (GST_ELEMENT_CAST (vfbin),
          missing_element_name));
  GST_ELEMENT_ERROR (vfbin, CORE, MISSING_PLUGIN,
      (_("Missing element '%s' - check your GStreamer installation."),
          missing_element_name), (NULL));
  goto error;

error:
  GST_WARNING_OBJECT (vfbin, "Creating internal elements failed");
  if (firstpad)
    gst_object_unref (firstpad);
  return FALSE;
}
Example #23
0
/* Helper function to test delayed linking support in parse_launch by creating
 * a test element based on bin, which contains a fakesrc and a sometimes 
 * pad-template, and trying to link to a fakesink. When the bin transitions
 * to paused it adds a pad, which should get linked to the fakesink */
static void
run_delayed_test (const gchar * pipe_str, const gchar * peer,
    gboolean expect_link)
{
  GstElement *pipe, *src, *sink;
  GstPad *srcpad, *sinkpad, *peerpad = NULL;

  pipe = setup_pipeline (pipe_str);

  src = gst_bin_get_by_name (GST_BIN (pipe), "src");
  fail_if (src == NULL, "Test source element was not created");

  sink = gst_bin_get_by_name (GST_BIN (pipe), "sink");
  fail_if (sink == NULL, "Test sink element was not created");

  /* The src should not yet have a src pad */
  srcpad = gst_element_get_pad (src, "src");
  fail_unless (srcpad == NULL, "Source element already has a source pad");

  /* Set the state to PAUSED and wait until the src at least reaches that
   * state */
  fail_if (gst_element_set_state (pipe, GST_STATE_PAUSED) ==
      GST_STATE_CHANGE_FAILURE);

  fail_if (gst_element_get_state (src, NULL, NULL, GST_CLOCK_TIME_NONE) ==
      GST_STATE_CHANGE_FAILURE);

  /* Now, the source element should have a src pad, and if "peer" was passed, 
   * then the src pad should have gotten linked to the 'sink' pad of that 
   * peer */
  srcpad = gst_element_get_pad (src, "src");
  fail_if (srcpad == NULL, "Source element did not create source pad");

  peerpad = gst_pad_get_peer (srcpad);

  if (expect_link == TRUE) {
    fail_if (peerpad == NULL, "Source element pad did not get linked");
  } else {
    fail_if (peerpad != NULL,
        "Source element pad got linked but should not have");
  }
  if (peerpad != NULL)
    gst_object_unref (peerpad);

  if (peer != NULL) {
    GstElement *peer_elem = gst_bin_get_by_name (GST_BIN (pipe), peer);

    fail_if (peer_elem == NULL, "Could not retrieve peer %s", peer);

    sinkpad = gst_element_get_pad (peer_elem, "sink");
    fail_if (sinkpad == NULL, "Peer element did not have a 'sink' pad");

    fail_unless (peerpad == sinkpad,
        "Source src pad got connected to the wrong peer");
    gst_object_unref (sinkpad);
  }

  gst_object_unref (srcpad);

  gst_object_unref (src);
  gst_object_unref (sink);
  gst_object_unref (pipe);
}
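A hypothetical invocation of the helper (the element name is illustrative; the real test suite registers its own bin-based element with a sometimes src pad):

/* "delayedsrc" stands in for the bin-based test element described above */
run_delayed_test ("delayedsrc name=src ! fakesink name=sink", "sink", TRUE);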
Example #24
0
BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder)
{
#if GST_VERSION_MAJOR > 0
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin name=videodecoder !";
	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#else
	const char* video = "appsrc name=videosource ! queue2 name=videoqueue ! decodebin2 name=videodecoder !";
	const char* audio = "appsrc name=audiosource ! queue2 name=audioqueue ! decodebin2 name=audiodecoder ! audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
#endif
	char pipeline[1024];

	if (!mdecoder)
		return FALSE;

	/* TODO: Constructing the pipeline from a string makes it easy to override it with arguments.
	 *       The only fixed elements necessary are appsrc and the volume element for audio streams.
	 *       The rest could easily be provided in gstreamer pipeline notation from the command line. */
	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=videosink", video, tsmf_platform_get_video_sink());
	else
		sprintf_s(pipeline, sizeof(pipeline), "%s %s name=audiosink", audio, tsmf_platform_get_audio_sink());

	DEBUG_TSMF("pipeline=%s", pipeline);
	mdecoder->pipe = gst_parse_launch(pipeline, NULL);

	if (!mdecoder->pipe)
	{
		WLog_ERR(TAG, "Failed to create new pipe");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosource");
	else
		mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosource");

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "Failed to get appsrc");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videoqueue");
	else
		mdecoder->queue = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audioqueue");

	if (!mdecoder->queue)
	{
		WLog_ERR(TAG, "Failed to get queue");
		return FALSE;
	}

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "videosink");
	else
		mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiosink");

	if (!mdecoder->outsink)
	{
		WLog_ERR(TAG, "Failed to get sink");
		return FALSE;
	}

	g_signal_connect(mdecoder->outsink, "child-added", G_CALLBACK(cb_child_added), mdecoder);

	if (mdecoder->media_type == TSMF_MAJOR_TYPE_AUDIO)
	{
		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");

		if (!mdecoder->volume)
		{
			WLog_ERR(TAG, "Failed to get volume");
			return FALSE;
		}

		tsmf_gstreamer_change_volume((ITSMFDecoder*)mdecoder, mdecoder->gstVolume*((double) 10000), mdecoder->gstMuted);
	}

	tsmf_platform_register_handler(mdecoder);
	/* AppSrc settings */
	GstAppSrcCallbacks callbacks =
	{
		tsmf_gstreamer_need_data,
		tsmf_gstreamer_enough_data,
		tsmf_gstreamer_seek_data
	};
	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);
	g_object_set(mdecoder->src, "is-live", FALSE, NULL);
	g_object_set(mdecoder->src, "block", FALSE, NULL);
	g_object_set(mdecoder->src, "blocksize", 1024, NULL);
	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);
	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);
	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);
	gst_app_src_set_latency((GstAppSrc *) mdecoder->src, 0, -1);
	gst_app_src_set_max_bytes((GstAppSrc *) mdecoder->src, (guint64) 0);//unlimited
	g_object_set(G_OBJECT(mdecoder->queue), "use-buffering", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "use-rate-estimate", FALSE, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-buffers", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-bytes", 0, NULL);
	g_object_set(G_OBJECT(mdecoder->queue), "max-size-time", (guint64) 0, NULL);

	/* Only set these properties if not an autosink, otherwise we will set properties when real sinks are added */
	if (g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoVideoSink") != 0 && g_strcmp0(G_OBJECT_TYPE_NAME(mdecoder->outsink), "GstAutoAudioSink") != 0)
	{
		if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
		}
		else
		{
			gst_base_sink_set_max_lateness((GstBaseSink *) mdecoder->outsink, 10000000); /* nanoseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "buffer-time", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "drift-tolerance", (gint64) 20000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "latency-time", (gint64) 10000, NULL); /* microseconds */
			g_object_set(G_OBJECT(mdecoder->outsink), "slave-method", 1, NULL);
		}
		g_object_set(G_OBJECT(mdecoder->outsink), "sync", TRUE, NULL); /* synchronize on the clock */
		g_object_set(G_OBJECT(mdecoder->outsink), "async", TRUE, NULL); /* no async state changes */
	}

	tsmf_window_create(mdecoder);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_READY);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	mdecoder->pipeline_start_time_valid = 0;
	mdecoder->shutdown = 0;
	mdecoder->paused = FALSE;

	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, get_type(mdecoder));

	return TRUE;
}
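The three AppSrc callbacks wired up above are not shown. A hedged sketch of their shapes, using the documented GstAppSrcCallbacks signatures (the bodies are placeholders; the real FreeRDP handlers coordinate with the TSMF feeder thread):

static void tsmf_gstreamer_need_data(GstAppSrc* src, guint length, gpointer user_data)
{
	/* the pipeline is hungry: wake whoever pushes sample data into appsrc */
}

static void tsmf_gstreamer_enough_data(GstAppSrc* src, gpointer user_data)
{
	/* internal queues are full: stop pushing until need-data fires again */
}

static gboolean tsmf_gstreamer_seek_data(GstAppSrc* src, guint64 offset, gpointer user_data)
{
	/* the stream type is SEEKABLE, so accept the new read position */
	return TRUE;
}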
Example #25
0
BOOL tsmf_gstreamer_pipeline_build(TSMFGstreamerDecoder* mdecoder)
{
	const char* appsrc = "appsrc name=source ! decodebin name=decoder !";
	const char* video = "autovideoconvert ! videoscale !";
	const char* audio = "audioconvert ! audiorate ! audioresample ! volume name=audiovolume !";
	char pipeline[1024];

	if (!mdecoder)
		return FALSE;

	/* TODO: Constructing the pipeline from a string makes it easy to override it with arguments.
	 *       The only fixed elements necessary are appsrc and the volume element for audio streams.
	 *       The rest could easily be provided in gstreamer pipeline notation from the command line. */
	if (mdecoder->media_type == TSMF_MAJOR_TYPE_VIDEO)
		sprintf_s(pipeline, sizeof(pipeline), "%s %s %s name=outsink", appsrc, video, tsmf_platform_get_video_sink());
	else
		sprintf_s(pipeline, sizeof(pipeline), "%s %s %s name=outsink", appsrc, audio, tsmf_platform_get_audio_sink());

	DEBUG_TSMF("pipeline=%s", pipeline);
	mdecoder->pipe = gst_parse_launch(pipeline, NULL);

	if (!mdecoder->pipe)
	{
		WLog_ERR(TAG, "Failed to create new pipe");
		return FALSE;
	}

	mdecoder->src = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "source");

	if (!mdecoder->src)
	{
		WLog_ERR(TAG, "Failed to get appsrc");
		return FALSE;
	}

	mdecoder->outsink = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "outsink");

	if (!mdecoder->outsink)
	{
		WLog_ERR(TAG, "Failed to get sink");
		return FALSE;
	}

	if (mdecoder->media_type != TSMF_MAJOR_TYPE_VIDEO)
	{
		mdecoder->volume = gst_bin_get_by_name(GST_BIN(mdecoder->pipe), "audiovolume");

		if (!mdecoder->volume)
		{
			WLog_ERR(TAG, "Failed to get volume");
			return FALSE;
		}
	}

	tsmf_platform_register_handler(mdecoder);
	/* AppSrc settings */
	GstAppSrcCallbacks callbacks =
	{
		tsmf_gstreamer_need_data,
		tsmf_gstreamer_enough_data,
		tsmf_gstreamer_seek_data
	};
	g_object_set(mdecoder->src, "format", GST_FORMAT_TIME, NULL);
	g_object_set(mdecoder->src, "is-live", TRUE, NULL);
	g_object_set(mdecoder->src, "block", TRUE, NULL);
	gst_app_src_set_caps((GstAppSrc *) mdecoder->src, mdecoder->gst_caps);
	gst_app_src_set_callbacks((GstAppSrc *)mdecoder->src, &callbacks, mdecoder, NULL);
	gst_app_src_set_stream_type((GstAppSrc *) mdecoder->src, GST_APP_STREAM_TYPE_SEEKABLE);
	tsmf_window_create(mdecoder);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_READY);
	tsmf_gstreamer_pipeline_set_state(mdecoder, GST_STATE_PLAYING);
	mdecoder->pipeline_start_time_valid = 0;
	mdecoder->shutdown = 0;

	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(mdecoder->pipe), GST_DEBUG_GRAPH_SHOW_ALL, get_type(mdecoder));

	return TRUE;
}
Example #26
0
int
main (int argc, char **argv)
{

#ifdef WIN32
  HGLRC sdl_gl_context = 0;
  HDC sdl_dc = 0;
#else
  SDL_SysWMinfo info;
  Display *sdl_display = NULL;
  Window sdl_win = 0;
  GLXContext sdl_gl_context = NULL;
#endif

  GMainLoop *loop = NULL;
  GstPipeline *pipeline = NULL;
  GstBus *bus = NULL;
  GstElement *glfilter = NULL;
  GstElement *fakesink = NULL;
  GstState state;
  GAsyncQueue *queue_input_buf = NULL;
  GAsyncQueue *queue_output_buf = NULL;
  GstGLDisplay *display;
  GstGLContext *sdl_context;
  const gchar *platform;

  /* Initialize SDL for video output */
  if (SDL_Init (SDL_INIT_VIDEO) < 0) {
    fprintf (stderr, "Unable to initialize SDL: %s\n", SDL_GetError ());
    return -1;
  }

  /* Create a 640x480 OpenGL screen */
  if (SDL_SetVideoMode (640, 480, 0, SDL_OPENGL) == NULL) {
    fprintf (stderr, "Unable to create OpenGL screen: %s\n", SDL_GetError ());
    SDL_Quit ();
    return -1;
  }

  /* Set the title bar in environments that support it */
  SDL_WM_SetCaption ("SDL and gst-plugins-gl", NULL);


  /* Loop, drawing and checking events */
  InitGL (640, 480);

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* retrieve and turn off sdl opengl context */
#ifdef WIN32
  sdl_gl_context = wglGetCurrentContext ();
  sdl_dc = wglGetCurrentDC ();
  wglMakeCurrent (0, 0);
  platform = "wgl";
  display = gst_gl_display_new ();
#else
  SDL_VERSION (&info.version);
  SDL_GetWMInfo (&info);
  /* FIXME: This display is different to the one that SDL uses to create the
   * GL context inside SDL_SetVideoMode() above which fails on Intel hardware
   */
  sdl_display = info.info.x11.display;
  sdl_win = info.info.x11.window;
  sdl_gl_context = glXGetCurrentContext ();
  glXMakeCurrent (sdl_display, None, 0);
  platform = "glx";
  display = (GstGLDisplay *) gst_gl_display_x11_new_with_display (sdl_display);
#endif

  sdl_context = gst_gl_context_new_wrapped (display, (guintptr) sdl_gl_context,
      gst_gl_platform_from_string (platform), GST_GL_API_OPENGL);

  pipeline =
      GST_PIPELINE (gst_parse_launch
      ("videotestsrc ! video/x-raw, width=320, height=240, framerate=(fraction)30/1 ! "
          "gleffects effect=5 ! fakesink sync=1", NULL));

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb), loop);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb), loop);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), loop);
  gst_object_unref (bus);

  /* sdl_gl_context is an external OpenGL context with which gst-plugins-gl wants to share textures */
  glfilter = gst_bin_get_by_name (GST_BIN (pipeline), "gleffects0");
  g_object_set (G_OBJECT (glfilter), "other-context", sdl_context, NULL);
  gst_object_unref (glfilter);

  /* NULL to PAUSED state pipeline to make sure the gst opengl context is created and
   * shared with the sdl one */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
  state = GST_STATE_PAUSED;
  if (gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL,
          GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
    g_debug ("failed to pause pipeline\n");
    return -1;
  }

  /* turn the sdl opengl context back on */
#ifdef WIN32
  wglMakeCurrent (sdl_dc, sdl_gl_context);
#else
  glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context);
#endif

  /* append a gst-gl texture to this queue when you no longer need it */
  fakesink = gst_bin_get_by_name (GST_BIN (pipeline), "fakesink0");
  g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL);
  g_signal_connect (fakesink, "handoff", G_CALLBACK (on_gst_buffer), NULL);
  queue_input_buf = g_async_queue_new ();
  queue_output_buf = g_async_queue_new ();
  g_object_set_data (G_OBJECT (fakesink), "queue_input_buf", queue_input_buf);
  g_object_set_data (G_OBJECT (fakesink), "queue_output_buf", queue_output_buf);
  g_object_set_data (G_OBJECT (fakesink), "loop", loop);
  gst_object_unref (fakesink);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  g_main_loop_run (loop);

  /* before deinitializing the gst-gl opengl context,
   * no shared context (here the sdl one) may be current
   */
#ifdef WIN32
  wglMakeCurrent (0, 0);
#else
  glXMakeCurrent (sdl_display, sdl_win, sdl_gl_context);
#endif

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* turn the sdl opengl context back on */
#ifdef WIN32
  wglMakeCurrent (sdl_dc, sdl_gl_context);
#else
  glXMakeCurrent (sdl_display, None, 0);
#endif

  SDL_Quit ();

  /* make sure there are no pending gst-gl buffers in the communication
   * queues between sdl and gst-gl
   */
  while (g_async_queue_length (queue_input_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_input_buf);
    gst_buffer_unref (buf);
  }

  while (g_async_queue_length (queue_output_buf) > 0) {
    GstBuffer *buf = (GstBuffer *) g_async_queue_pop (queue_output_buf);
    gst_buffer_unref (buf);
  }

  return 0;
}
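The on_gst_buffer handoff callback connected above is not listed. A minimal sketch, assuming the fakesink "handoff" signal signature (the real example also wakes the SDL side to draw the texture):

static void
on_gst_buffer (GstElement * fakesink, GstBuffer * buf, GstPad * pad,
    gpointer data)
{
  GAsyncQueue *queue_input_buf =
      (GAsyncQueue *) g_object_get_data (G_OBJECT (fakesink),
      "queue_input_buf");

  /* keep the GL buffer alive while the SDL thread consumes it */
  gst_buffer_ref (buf);
  g_async_queue_push (queue_input_buf, buf);
}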
Example #27
0
/*
 * Makes a pipeline in the form:
 * filesrc location=file ! demuxer ! fakesink
 *
 * And gets the tags that are posted on the bus to compare
 * with the tags in 'tag_str'
 */
static void
test_demux_tags (const gchar * tag_str, const gchar * demuxer,
    const gchar * file)
{
  GstElement *pipeline;
  GstBus *bus;
  GMainLoop *loop;
  GstTagList *sent_tags;
  gint i, j, n_recv, n_sent;
  const gchar *name_sent, *name_recv;
  const GValue *value_sent, *value_recv;
  gboolean found;
  gint comparison;
  GstElement *demux;
  gchar *launch_str;
  guint bus_watch = 0;

  GST_DEBUG ("testing tags : %s", tag_str);

  if (received_tags) {
    gst_tag_list_free (received_tags);
    received_tags = NULL;
  }

  launch_str = g_strdup_printf ("filesrc location=%s ! %s name=demux ! "
      "fakesink", file, demuxer);
  pipeline = gst_parse_launch (launch_str, NULL);
  g_free (launch_str);
  fail_unless (pipeline != NULL);

  demux = gst_bin_get_by_name (GST_BIN (pipeline), "demux");
  fail_unless (demux != NULL);

  loop = g_main_loop_new (NULL, TRUE);
  fail_unless (loop != NULL);

  bus = gst_element_get_bus (pipeline);
  fail_unless (bus != NULL);
  bus_watch = gst_bus_add_watch (bus, bus_handler, loop);
  gst_object_unref (bus);

  sent_tags = gst_structure_from_string (tag_str, NULL);
  fail_unless (sent_tags != NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  GST_DEBUG ("mainloop done : %p", received_tags);

  /* verify tags */
  fail_unless (received_tags != NULL);
  n_recv = gst_structure_n_fields (received_tags);
  n_sent = gst_structure_n_fields (sent_tags);
  fail_unless (n_recv >= n_sent);
  /* FIXME: compare taglist values */
  for (i = 0; i < n_sent; i++) {
    name_sent = gst_structure_nth_field_name (sent_tags, i);
    value_sent = gst_structure_get_value (sent_tags, name_sent);
    found = FALSE;
    for (j = 0; j < n_recv; j++) {
      name_recv = gst_structure_nth_field_name (received_tags, j);
      if (!strcmp (name_sent, name_recv)) {
        value_recv = gst_structure_get_value (received_tags, name_recv);
        comparison = gst_value_compare (value_sent, value_recv);
        if (comparison != GST_VALUE_EQUAL) {
          gchar *vs = g_strdup_value_contents (value_sent);
          gchar *vr = g_strdup_value_contents (value_recv);
          GST_DEBUG ("sent = %s:'%s', recv = %s:'%s'",
              G_VALUE_TYPE_NAME (value_sent), vs,
              G_VALUE_TYPE_NAME (value_recv), vr);
          g_free (vs);
          g_free (vr);
        }
        fail_unless (comparison == GST_VALUE_EQUAL,
            "tag item %s has been received with different type or value",
            name_sent);
        found = TRUE;
        break;
      }
    }
    fail_unless (found, "tag item %s is lost", name_sent);
  }

  gst_tag_list_free (received_tags);
  received_tags = NULL;
  gst_tag_list_free (sent_tags);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_main_loop_unref (loop);
  g_object_unref (demux);
  g_object_unref (pipeline);
  g_source_remove (bus_watch);
}
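A hypothetical call to the helper (the tag string, demuxer and file path are placeholders):

test_demux_tags ("taglist,title=(string)\"a title\",artist=(string)\"an artist\"",
    "qtdemux", "/path/to/sample.mov");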
Example #28
0
File: m1.cpp Project: jhgorse/mog
///////////////////////////////////////////////////////////////////////////////////////////////////
/// main()
///
/// The main function. Creates the pipeline and makes it go.
///////////////////////////////////////////////////////////////////////////////////////////////////
int main(int argc, char *argv[])
{
	// Initialize GStreamer
	gst_init (&argc, &argv);
	
	// Parse the target hostname/IP
	if (argc != 2)
	{
		g_printerr("Usage: %s [host or ip]\n", argv[0]);
		return -1;
	}
	const gchar* target = argv[1];
	g_assert(target != NULL);

	// Parse the pipeline from the string above
	pipeline = gst_parse_launch(PIPELINE_STRING, NULL);
	if (pipeline == NULL)
	{
		g_printerr("Failed to create pipeline!\n");
		return -1;
	}
	
	// Set the clients property of the UDP sink elements
	GstElement* element = gst_bin_get_by_name(GST_BIN(pipeline), "vsink");
	g_assert(element != NULL);
	const gchar* clients_value = g_strdup_printf("%s:10000", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "vcsink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10001", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "asink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10002", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);
	
	element = gst_bin_get_by_name(GST_BIN(pipeline), "acsink");
	g_assert(element != NULL);
	clients_value = g_strdup_printf("%s:10003", target);
	g_object_set(element, "clients", clients_value, NULL);
	g_free(const_cast<gchar*>(clients_value));
	gst_object_unref(element);

	// Create a pipeline tracer for latency / jitter information
	PipelineTracer* pTracer = new PipelineTracer(pipeline);

	// Put the pipeline in the playing state
	GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE)
	{
		g_printerr("Unable to set the pipeline to the playing state.\n");
		gst_object_unref(pipeline);
		return -1;
	}
	
	// Dump to dot file (if GST_DEBUG_DUMP_DOT_DIR is set) to ${GST_DEBUG_DUMP_DOT_DIR}/.dot.
	// We wait until the pipeline is playing to make sure pads are linked.
	GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, argv[0]);
	
	// Assign the SIGINT handler to send EOS
	struct sigaction sigact;
	sigact.sa_handler = on_sig_int;
	sigemptyset(&sigact.sa_mask);
	sigact.sa_flags = 0;
	sigaction(SIGINT, &sigact, NULL);
	g_print("Playing... press Ctrl-C to terminate.\n");
  
	// Wait until error or EOS
	GstBus* bus = gst_element_get_bus(pipeline);
	GstMessage* msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
   
	// Parse message and print stuff about it.
	if (msg != NULL)
	{
		GError *err;
		gchar *debug_info;
		
		switch (GST_MESSAGE_TYPE(msg))
		{
			case GST_MESSAGE_ERROR:
				gst_message_parse_error(msg, &err, &debug_info);
				g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
				g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
				g_clear_error(&err);
				g_free(debug_info);
				break;
				
			case GST_MESSAGE_EOS:
				g_print("End-Of-Stream reached.\n");
				break;
				
			default:
				// We should not reach here because we only asked for ERRORs and EOS
				g_printerr("Unexpected message received.\n");
				break;
		} // END switch(message type)
		gst_message_unref(msg);
	} // END if (message)

	// Free resources
	delete pTracer;
	gst_object_unref(bus);
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);
	
	return 0;
} // END main()
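The on_sig_int handler installed above is not shown. A minimal sketch, assuming the file-scope pipeline pointer used throughout main() (sending EOS lets the bus wait in main() return cleanly):

static void on_sig_int(int signum)
{
	// Inject EOS at the pipeline level; main() then sees GST_MESSAGE_EOS
	// on the bus and tears the pipeline down instead of dying mid-stream.
	if (pipeline != NULL)
		gst_element_send_event(pipeline, gst_event_new_eos());
}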
Example #29
0
    void RTSPserverGate::setLocation(std::string a)
	{

	if (factory->key)
	    {

	    if (!lock_transition && pid == 0)
		{
		lock_transition = true;
		this->location = a;
		GstStateChangeReturn ret;
		GstPad *pad;
		GstState state;
		GstState rtspstate;
		gchar *key;
		GstRTSPMedia *media;
		GstRTSPMediaFactoryClass *klass;
		GstElement * source;
		GstElement * buffer;
		std::clog << "Key:" << factory->key << std::endl;
		klass = GST_RTSP_MEDIA_FACTORY_GET_CLASS(this->factory);
		g_mutex_lock(this->factory->medias_lock);
		media = static_cast<GstRTSPMedia*> (g_hash_table_lookup(
			factory->medias, factory->key));

		if (media)
		    g_object_ref(media);

		if (media)
		    {
		    source = gst_bin_get_by_name(GST_BIN(media->element),
			    "gate");
		    if (source)
			{
			std::clog << "########## address:" << std::endl;

			GstIterator* it = gst_element_iterate_src_pads(source);
			GstIteratorResult result = GST_ITERATOR_OK;
			if (result == GST_ITERATOR_OK)
			    {
			    gpointer p;
			    result = gst_iterator_next(it, &p);
			    GstPad* pad = GST_PAD(p);
			    std::clog << "PadName: " << gst_pad_get_name(pad)
				    << std::endl;
			    gst_pad_set_blocked_async(pad, TRUE,
				    gate_block_async_cb, this);
			    //g_object_unref(pad);
			    }
			gst_iterator_free(it);

			}
		    g_mutex_unlock(this->factory->medias_lock);
		    g_object_unref(media);

		    }
		}

	    }

	else
	    {
	    this->location = a;
	    this->pipeline
		    = "( rtspsrc location=" + this->location
			    + " latency=1  name=gate ! rtph264depay name=buffer byte-stream=false  ! queue2 max-size-bytes=100000000 use-buffering=true ! rtph264pay name=pay0 pt=96 )";



	    }
	}
    void* RendStreamer::run()
    {

        cout << "rendstreamer started " << endl;
        gchar * description=(gchar *) g_malloc(1000);
        g_sprintf(description,
//"webmmux name=stream appsrc name=mysrc ! video/x-raw,format=RGB,width=%d,height=%d,framerate=20/1 ! videoconvert ! vp8enc !  stream.",
"webmmux name=stream appsrc name=mysrc ! video/x-raw,format=RGB,width=%d,height=%d,framerate=5/1 ! videoconvert ! vp8enc  !  stream.",  
                this->pconf->width,this->pconf->height);
        gint argc=0;
        gst_init (&argc, NULL);

        GError *err = NULL;
        GstElement * bin = gst_parse_launch (description, &err);

        g_free(description);

          if ( (!bin) || (err)) {
            g_print ("invalid pipeline: %s\n", err->message);
            g_clear_error (&err);
            exit(-2);
          }

          multisocketsink = gst_element_factory_make ("multisocketsink", NULL);


 printf("parsed %d, %d\n",this->pconf->width,this->pconf->height);
          /* setup appsrc */
          GstElement *appsrc = gst_bin_get_by_name (GST_BIN (bin), "mysrc");
          if (!appsrc) {
            g_print ("no element with name \"appsrc\" found\n");
            gst_object_unref (bin);
            exit(-3);
          }
          /*
          g_object_set (G_OBJECT (appsrc), "caps",
                gst_caps_new_simple ("video/x-raw",
                             "format", G_TYPE_STRING, "RGB",
                             "width", G_TYPE_INT, this->width,
                             "height", G_TYPE_INT, this->height,
                             "framerate", GST_TYPE_FRACTION, 1, 2,
                             NULL), NULL);


    */

     gst_app_src_set_size (GST_APP_SRC (appsrc), (gint64) -1); // total stream size is not known
     gst_app_src_set_stream_type(GST_APP_SRC (appsrc),GST_APP_STREAM_TYPE_STREAM);
     g_object_set (G_OBJECT (appsrc),"format", GST_FORMAT_TIME, NULL);


            /*setup muxer*/
            GstElement *stream = gst_bin_get_by_name (GST_BIN (bin), "stream");
              if (!stream) {
                g_print ("no element with name \"stream\" found\n");
                gst_object_unref (bin);
                exit(-3);
              }

              GstPad *srcpad = gst_element_get_static_pad (stream, "src");
              if (!srcpad) {
                g_print ("no \"src\" pad in element \"stream\" found\n");
                gst_object_unref (stream);
                gst_object_unref (bin);
                exit(-4);
              }

              GstPad *ghostpad = gst_ghost_pad_new ("src", srcpad);
              gst_element_add_pad (GST_ELEMENT (bin), ghostpad);
              gst_object_unref (srcpad);


         /* add to pipeline */
          pipeline = gst_pipeline_new ("pipeline");
          gst_bin_add_many (GST_BIN (pipeline), bin,multisocketsink, NULL);

          /* link with multi socket */
          GstPad *sinkpad = gst_element_get_static_pad (multisocketsink, "sink");
          gst_pad_link (ghostpad, sinkpad);
          gst_object_unref (sinkpad);

          /*get the bus */
          GstBus *bus = gst_element_get_bus (pipeline);
          gst_bus_add_signal_watch (bus);
          g_signal_connect (bus, "message", G_CALLBACK (on_message), NULL);
          gst_object_unref (bus);

            /*call backs */
          g_signal_connect (multisocketsink, "client-socket-removed",G_CALLBACK (on_client_socket_removed), NULL);
          g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), this);

          /* setup main loop */
          loop = g_main_loop_new (NULL, FALSE);
          if (gst_element_set_state (pipeline,GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) {
                    gst_object_unref (pipeline);
                    g_main_loop_unref (loop);
                    g_print ("Failed to set pipeline to ready\n");
                    exit(-5);
          }

          /*setup server*/
          service = g_socket_service_new ();
          g_socket_listener_add_inet_port (G_SOCKET_LISTENER (service), this->pconf->port, NULL,NULL);
          g_signal_connect (service, "incoming", G_CALLBACK (on_new_connection), NULL);
          g_socket_service_start (service);
          g_print ("Listening on http://127.0.0.1:%u/\n",this->pconf->port);

        /* start main loop */
        g_main_loop_run (loop);
        cout << "F**k " << endl;
        return NULL;
    }
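The cb_need_data handler connected above is not included. A hedged sketch, assuming the appsrc "need-data" signal signature and the pconf fields used earlier (a real implementation would copy the rendered RGB frame instead of zero-filling):

static void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
    RendStreamer *self = (RendStreamer *) user_data;
    static GstClockTime timestamp = 0;
    const guint size = self->pconf->width * self->pconf->height * 3; /* RGB */

    GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
    gst_buffer_memset (buffer, 0, 0x00, size); /* placeholder frame */

    GST_BUFFER_PTS (buffer) = timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 5); /* 5 fps, as in the caps */
    timestamp += GST_BUFFER_DURATION (buffer);

    GstFlowReturn ret;
    g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref (buffer); /* the push-buffer action signal takes its own ref */
}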