Code Example #1
File: cvcap_gstreamer.cpp Project: alejotima/opencv
//
// start the pipeline, grab a buffer, and pause again
//
static int icvGrabFrame_GStreamer(CvCapture_GStreamer *cap)
{
	if(!cap->pipeline)
		return 0;

	if(gst_app_sink_is_eos(GST_APP_SINK(cap->appsink))) {
		//printf("end of stream\n");
		return 0;
	}

	if(cap->buffer)
		gst_buffer_unref(cap->buffer);

	icvHandleMessage(cap);

	if(!gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink))) {
//		printf("no buffers queued, starting pipeline\n");

		if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PLAYING) ==
		GST_STATE_CHANGE_FAILURE) {
			icvHandleMessage(cap);
			return 0;
		}

// 		icvHandleMessage(cap);
//
// 		// check whether stream contains an acceptable video stream
// 		GstPad *sinkpad = gst_element_get_pad(cap->colour, "sink");
// 		if(!GST_PAD_IS_LINKED(sinkpad)) {
// 			gst_object_unref(sinkpad);
// 			fprintf(stderr, "GStreamer: Pipeline is NOT ready. Format unknown?\n");
// 			return 0;
// 		}
// 		gst_object_unref(sinkpad);

// 		printf("pulling preroll\n");
//
// 		if(!gst_app_sink_pull_preroll(GST_APP_SINK(cap->appsink))) {
// 			printf("no preroll\n");
// 			return 0;
// 		}

//		printf("pulling buffer\n");

		cap->buffer = gst_app_sink_pull_buffer(GST_APP_SINK(cap->appsink));

//		printf("pausing pipeline\n");

		if(gst_element_set_state(GST_ELEMENT(cap->pipeline), GST_STATE_PAUSED) ==
		GST_STATE_CHANGE_FAILURE) {
			icvHandleMessage(cap);
			return 0;
		}

//		printf("pipeline paused\n");
	} else {
//		printf("peeking buffer, %d buffers in queue\n",
//		       gst_app_sink_get_queue_length(GST_APP_SINK(cap->appsink)));
		cap->buffer = gst_app_sink_peek_buffer(GST_APP_SINK(cap->appsink));
	}

	if(!cap->buffer)
		return 0;

//	printf("pulled buffer %p\n", cap->buffer);

	return 1;
}
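Note: gst_app_sink_pull_buffer() and gst_app_sink_peek_buffer() are GStreamer 0.10 API. Under GStreamer 1.0 the grab step would pull a GstSample instead; a minimal sketch, assuming the same cap->appsink field:

GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(cap->appsink));
if(sample) {
	GstBuffer *buffer = gst_sample_get_buffer(sample); /* owned by the sample */
	/* ... map or copy the buffer here ... */
	gst_sample_unref(sample);
}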
Code Example #2
File: gst-typefind.c Project: AlerIl/gstreamer0.10
static void
typefind_file (const gchar * filename)
{
  GstStateChangeReturn sret;
  GstElement *pipeline;
  GstElement *source;
  GstElement *typefind;
  GstElement *fakesink;
  GstState state;
  GstCaps *caps = NULL;
  GDir *dir;

  if ((dir = g_dir_open (filename, 0, NULL))) {
    const gchar *entry;

    while ((entry = g_dir_read_name (dir))) {
      gchar *path;

      path = g_strconcat (filename, G_DIR_SEPARATOR_S, entry, NULL);
      typefind_file (path);
      g_free (path);
    }

    g_dir_close (dir);
    return;
  }

  pipeline = gst_pipeline_new ("pipeline");

  source = gst_element_factory_make ("filesrc", "source");
  g_assert (GST_IS_ELEMENT (source));
  typefind = gst_element_factory_make ("typefind", "typefind");
  g_assert (GST_IS_ELEMENT (typefind));
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  g_assert (GST_IS_ELEMENT (fakesink));

  gst_bin_add_many (GST_BIN (pipeline), source, typefind, fakesink, NULL);
  gst_element_link_many (source, typefind, fakesink, NULL);

  g_signal_connect (G_OBJECT (typefind), "have-type",
      G_CALLBACK (have_type_handler), &caps);

  g_object_set (source, "location", filename, NULL);

  GST_DEBUG ("Starting typefinding for %s", filename);

  /* typefind will only commit to PAUSED if it actually finds a type;
   * otherwise the state change fails */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

  /* wait until state change either completes or fails */
  sret = gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL, -1);

  switch (sret) {
    case GST_STATE_CHANGE_FAILURE:{
      GstMessage *msg;
      GstBus *bus;
      GError *err = NULL;

      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
      gst_object_unref (bus);

      if (msg) {
        gst_message_parse_error (msg, &err, NULL);
        g_printerr ("%s - FAILED: %s\n", filename, err->message);
        g_error_free (err);
        gst_message_unref (msg);
      } else {
        g_printerr ("%s - FAILED: unknown error\n", filename);
      }
      break;
    }
    case GST_STATE_CHANGE_SUCCESS:{
      if (caps) {
        gchar *caps_str;

        caps_str = gst_caps_to_string (caps);
        g_print ("%s - %s\n", filename, caps_str);
        g_free (caps_str);
        gst_caps_unref (caps);
      } else {
        g_print ("%s - %s\n", filename, "No type found");
      }
      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
}
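The "have-type" handler wired above is not shown in this snippet. A minimal sketch of what have_type_handler presumably looks like (assumed, not the original): the signal delivers the detection probability and caps, and copying the caps into the caller's pointer matches the &caps user data above.

static void
have_type_handler (GstElement * typefind, guint probability,
    GstCaps * caps, gpointer user_data)
{
  GstCaps **p_caps = (GstCaps **) user_data;

  /* keep a copy for the caller; the signal's caps stay owned by typefind */
  if (p_caps)
    *p_caps = gst_caps_copy (caps);
}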
Code Example #3
int
main (gint argc, gchar ** argv)
{
  static GMainLoop *loop;
  GstCaps *caps;
  GstBus *bus;
  gchar *uri;

  const gchar *video_device = "/dev/video0";

  if (argc < 2) {
    fprintf (stderr, "oops, please give a file to play\n");
    return -1;
  }

  uri = g_filename_to_uri (argv[1], NULL, NULL);
  if (!uri) {
    fprintf (stderr, "failed to create the uri\n");
    return -1;
  }

  /* init gst */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);
  /* init elements */
  playbin = gst_element_factory_make ("playbin", "app_playbin");
  pipeline = gst_pipeline_new ("app_pipeline");
  v4l2src = gst_element_factory_make ("v4l2src", "app_v4l2src");
  videoscale = gst_element_factory_make ("videoscale", "app_videoscale");
  videoconvert_in =
      gst_element_factory_make ("videoconvert", "app_videoconvert_in");
  handdetect = gst_element_factory_make ("handdetect", "app_handdetect");
  videoconvert_out =
      gst_element_factory_make ("videoconvert", "app_videoconvert_out");
  xvimagesink = gst_element_factory_make ("xvimagesink", "app_xvimagesink");

  /* check init results */
  if (!playbin || !pipeline || !v4l2src || !videoscale || !videoconvert_in
      || !handdetect || !videoconvert_out || !xvimagesink)
    g_error ("ERROR: element init failed.\n");

  /* set values */
  g_object_set (G_OBJECT (playbin), "uri", uri, NULL);
  g_object_set (G_OBJECT (v4l2src), "device", video_device, NULL);

  /* set caps */
  caps =
      gst_caps_from_string
      ("video/x-raw, format=(string)RGB, width=320, height=240, framerate=(fraction)30/1");

  /* set bus */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, pipeline,
      NULL);
  gst_object_unref (bus);

  /* add elements to pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      v4l2src,
      videoscale,
      videoconvert_in, handdetect, videoconvert_out, xvimagesink, NULL);

  /* negotiate caps */
  if (!gst_element_link_filtered (v4l2src, videoscale, caps)) {
    g_printerr ("ERROR:v4l2src -> videoscale caps\n");
    return 0;
  }
  gst_caps_unref (caps);

  /* link elements */
  gst_element_link_many (videoscale,
      videoconvert_in, handdetect, videoconvert_out, xvimagesink, NULL);

  /* change states */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* start main loop */
  g_main_loop_run (loop);

  /* clean all */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (playbin));

  return 0;
}
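bus_sync_handler is referenced but not defined in this snippet. A plausible minimal version (an assumption, not the original): sync handlers run on the streaming thread, so only lightweight message inspection belongs there, and returning GST_BUS_PASS forwards every message to the regular bus.

static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR)
    g_printerr ("sync handler saw error from %s\n",
        GST_MESSAGE_SRC_NAME (message));
  return GST_BUS_PASS;
}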
Code Example #4
File: stream.c Project: cxp1991/WORKING
int stream ()
{
	printf("[stream]\n");
	GThread *video_send, *send_audio, *text_receive, *audio_receive, *check_connection;

	/* Init Glib */
	g_type_init();
	gloop = g_main_loop_new(NULL, FALSE);
	io_stdin = g_io_channel_unix_new(fileno(stdin));

	/* Allocate data */
	RpiData_SendVideo = (Rpi_Data*)malloc(sizeof(Rpi_Data));
	RpiData_SendAudio = (Rpi_Data*)malloc(sizeof(Rpi_Data));
	RpiData_ReceiveAudio = (Rpi_Data*)malloc(sizeof(Rpi_Data));
	RpiData_Text = (Rpi_Data*)malloc(sizeof(Rpi_Data));

	/* Set all flag to FALSE */
	video_send_gathering_done = FALSE;
	send_audio_gathering_done = FALSE;
	receive_audio_gathering_done = FALSE;
	text_gathering_done = FALSE;

	printf("[stream]video_send_gathering_done = %d\n", video_send_gathering_done);
	printf ("		==========	STREAMING	========== \n");
	/* Init video streaming */
	video_send = g_thread_new("send video", &_video_send_main, NULL);
	//send_audio = g_thread_new("send audio", &_send_audio_main, NULL);
	//audio_receive = g_thread_new("receive audio", &_audio_receive_main, NULL);
	//text_receive = g_thread_new("text send+receive", &_text_receive_main, NULL);
	check_connection = g_thread_new("Check if android exit or not", &_check_connection, NULL);

	/* Run the main loop until it quits */
	g_main_loop_run (gloop);

	printf("\n[stream]g_main_loop_quit (gloop);\n");

	/* Free libnice agent & gstreamer pipeline */

	/* Free send video */
	printf("[stream] Agent = %d!\n", RpiData_SendVideo->agent);
	g_object_unref(RpiData_SendVideo->agent);
	gst_object_unref (RpiData_SendVideo->bus);
	gst_element_set_state (RpiData_SendVideo->pipeline, GST_STATE_NULL);
	gst_object_unref (RpiData_SendVideo->pipeline);

//	/* Free send audio */
//	g_object_unref(RpiData_SendAudio->agent);
//	gst_object_unref (RpiData_SendAudio->bus);
//	gst_element_set_state (RpiData_SendAudio->pipeline, GST_STATE_NULL);
//	gst_object_unref (RpiData_SendAudio->pipeline);
//
//	/* Free receive audio */
//	g_object_unref(RpiData_ReceiveAudio->agent);
//	gst_object_unref (RpiData_ReceiveAudio->bus);
//	gst_element_set_state (RpiData_ReceiveAudio->pipeline, GST_STATE_NULL);
//	gst_object_unref (RpiData_ReceiveAudio->pipeline);
//
//	/* Free text */
//	g_object_unref(RpiData_Text->agent);
	//gst_object_unref (RpiData_Text->bus);
	//gst_element_set_state (RpiData_Text->pipeline, GST_STATE_NULL);
	//gst_object_unref (RpiData_Text->pipeline);

	free(RpiData_SendVideo);
//	free(RpiData_SendAudio);
//	free(RpiData_ReceiveAudio);
//	free(RpiData_Text);

	/* Free threads */
	g_thread_join (video_send);
//	g_thread_join (send_audio);
//	g_thread_join (audio_receive);
//	g_thread_join (text_receive);
	g_thread_join (check_connection);
	g_main_loop_unref (gloop);

	return 0;
}
Code Example #5
bool ofGstUtils::startPipeline(){

	bPaused 			= true;
	speed 				= 1.0f;


	GstBus * bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));

	if(bus){
		busWatchID = gst_bus_add_watch (bus, (GstBusFunc) busFunction, this);
		gst_object_unref(bus);
	}

	if(isAppSink){
		ofLogVerbose("ofGstUtils") << "startPipeline(): attaching callbacks";
		// set the appsink to not emit signals, we are using callbacks instead
		// and frameByFrame to get buffers by polling instead of callback
		g_object_set (G_OBJECT (gstSink), "emit-signals", FALSE, "sync", !bFrameByFrame, (void*)NULL);
		// gst_app_sink_set_drop(GST_APP_SINK(gstSink),1);
		// gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink),2);

		if(!bFrameByFrame){
			GstAppSinkCallbacks gstCallbacks;
			gstCallbacks.eos = &on_eos_from_source;
			gstCallbacks.new_preroll = &on_new_preroll_from_source;
#if GST_VERSION_MAJOR==0
			gstCallbacks.new_buffer = &on_new_buffer_from_source;
#else
			gstCallbacks.new_sample = &on_new_buffer_from_source;
#endif

			gst_app_sink_set_callbacks(GST_APP_SINK(gstSink), &gstCallbacks, this, NULL);
		}
	}

	// pause the pipeline
	//GstState targetState;
	GstState state;
	auto ret = gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PAUSED);
    switch (ret) {
		case GST_STATE_CHANGE_FAILURE:
			ofLogError("ofGstUtils") << "startPipeline(): unable to pause pipeline";
			return false;
			break;
		case GST_STATE_CHANGE_NO_PREROLL:
			ofLogVerbose() << "Pipeline is live and does not need PREROLL waiting PLAY";
			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PLAYING);
			if(isAppSink){
				gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink),1);
				gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
			}
			break;
		case GST_STATE_CHANGE_ASYNC:
			ofLogVerbose() << "Pipeline is PREROLLING";
			//targetState = GST_STATE_PAUSED;
			if(!isStream && gst_element_get_state(gstPipeline,&state,NULL,5*GST_SECOND)!=GST_STATE_CHANGE_SUCCESS){
				ofLogError("ofGstUtils") << "startPipeline(): unable to pause pipeline after 5s";
				return false;
			}else{
				ofLogVerbose() << "Pipeline is PREROLLED";
			}
			break;
		case GST_STATE_CHANGE_SUCCESS:
			ofLogVerbose() << "Pipeline is PREROLLED";
			break;
    }

	// wait for paused state to query the duration
	if(!isStream){
		bPlaying = true;
		bLoaded = true;
	}

	return true;
}
Code Example #6
File: pipeline.cpp Project: ChinnaSuhas/ossbuild
void Pipeline::unconfigure() const
{
    gst_element_set_state (m_pipeline, GST_STATE_NULL);
    gst_object_unref (m_pipeline);
}
Code Example #7
File: gstServer.c Project: scox/Server
#ifdef STANDALONE
int main (int argc, char **argv)
#else
  int gstServer(int port, char * ip, char * path)
#endif
{

  GstElement *sink;
  GstBus *bus;

  /* Initialisation */
  gst_init (NULL, NULL);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("client");
  source = gst_element_factory_make ("filesrc", "file-source");
  sink = gst_element_factory_make ("autoaudiosink", "client");

  if (!pipeline || !source || !sink)
    {
      g_printerr ("One element could not be created. Exiting.\n");
      return -1;
    }

  /* Set up the pipeline */
  /* we set the input filename to the source element */
#ifdef STANDALONE
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
#else
  g_object_set (G_OBJECT (source), "location", path, NULL);
#endif
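  /* note: filesrc has no "host"/"port" properties, so the two sets below only
   * produce GLib warnings; a network source such as tcpclientsrc was likely
   * intended here */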
  g_object_set (G_OBJECT (source), "host", ip, NULL);
  g_object_set (G_OBJECT (source), "port", port, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  /* source | sink */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);


  /* we link the elements together */
  /* source -> sink */
  gst_element_link (source, sink);

  /* Set the pipeline to "playing" state*/
#ifdef STANDALONE
  g_print ("Now playing: %s\n", argv[1]);
#else
  g_print ("Now playing: %s\n", path);
#endif

  gst_element_set_state (pipeline, GST_STATE_PLAYING);


  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);


  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
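bus_call and the loop/pipeline/source globals are defined elsewhere in the project. A minimal bus_call of the usual shape (assumed, not the original), quitting the main loop on EOS or error:

static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug = NULL;
      GError *error = NULL;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}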
Code Example #8
File: GstShow.cpp Project: bonfus/GstMadness
/*!
 * \brief GstOutput::close
 * ends the pipeline by sending EOS and destroys the pipeline and all
 * elements afterwards
 */
void GstShow::close()
{
    GstStateChangeReturn status;
    if (pipeline)
    {
        handleMessage(pipeline);
        //
        //for (int i=0; i<2;i++)
        //{
        //    if (gst_app_src_end_of_stream(GST_APP_SRC(source[i])) != GST_FLOW_OK)
        //    {
        //        L_(lwarning) << ("Cannot send EOS to GStreamer pipeline\n");
        //        return;
        //    }
        //}
        //
        ////wait for EOS to trickle down the pipeline. This will let all elements finish properly
        //GstBus* bus = gst_element_get_bus(pipeline);
        //GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        //if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
        //{
        //    L_(lwarning) << ("Error during GstOut finalization\n");
        //    return;
        //}
        //
        //if(msg != NULL)
        //{
        //    gst_message_unref(msg);
        //    g_object_unref(G_OBJECT(bus));
        //}
        
        status = gst_element_set_state (pipeline, GST_STATE_NULL);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for status update
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage (pipeline);
            gst_object_unref (GST_OBJECT (pipeline));
            pipeline = NULL;
            L_(lwarning) << ("Unable to stop gstreamer pipeline\n");
            return;
        }
        

        gst_object_unref (GST_OBJECT (pipeline));
        pipeline = NULL;
    }
}
Code Example #9
int main (int argc, char *argv[])
{

  GMainLoop *loop;
  GstPad *videopad;
  GstBus *bus;
  guint bus_watch_id;

  file_variations[0]  = MIN_QUALITY;
  file_variations[1]  = SD_QUALITY;
  file_variations[2]  = HD_QUALITY;
  
  scale_variations[0] = SCALE_SD;
  scale_variations[1] = SCALE_HD;
  scale_variations[2] = SCALE_SD;
  scale_variations[3] = SCALE_HD;
  scale_variations[4] = SCALE_MIN;
  scale_variations[5] = SCALE_HD;
  scale_variations[6] = SCALE_SD;
  scale_variations[7] = SCALE_MIN;

  initial_latency     = FALSE;
  current_file        = 1;
  num_changes         = 0;
  runs                = MEASUREMENT_RUNS;
  int opt_index       = 0, c = 0;
  char *scale_str     = DEFAULT_SCALE;
  char *decoder_name  = DEFAULT_DECODER;
  char *sink_name     = DEFAULT_SINK;
  char *filename      = DEFAULT_FILE;
  int display         = WANT_DISPLAY;
  int decoder_option  = WANT_FILE;
  int window_size     = DEFAULT_WINDOW_SIZE;
  int frequency       = DEFAULT_FREQUENCY;
  float ret_factor    = DEFAULT_RET_FACTOR, 
        ext_factor    = DEFAULT_EXT_FACTOR;
  gboolean measure    = DEFAULT_MEASUREMENT;
  gboolean sync       = DEFAULT_SYNC;
  gboolean dynamic    = DEFAULT_DYNAMIC;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse options. */
  while(c != -1)
  {
    static struct option long_options[] =
    {
      {"file",      required_argument, 0, 'f'},
      {"uri",       required_argument, 0, 'u'},
      {"retarget",  required_argument, 0, 'r'},
      {"extend",    required_argument, 0, 'e'},
      {"size",      required_argument, 0, 's'},
      {"scale",     required_argument, 0, 'c'},
      {"frequency", required_argument, 0, 'd'},
      {"runs",      required_argument, 0, 'n'},
      {"measure",   no_argument,       0, 'm'},
      {"no-sync",   no_argument,       0, 'p'},
      {"initial",   no_argument,       0, 'i'},
      {"dynamic",   no_argument,       0, 't'}
    };

    c = getopt_long(argc, argv, "f:u:r:e:s:c:d:n:mpit", long_options, &opt_index);

    if(c == -1)
      break;

    switch(c)
    {
      case 'f':
        filename = optarg;
        break;
      case 'u':
        filename = optarg;
        decoder_name = "uridecodebin";
        decoder_option = WANT_URI;
        break;
      case 'r':
        ret_factor = strtof(optarg, NULL);
        break;
      case 'e':
        ext_factor = strtof(optarg, NULL);
        break;
      case 's':
        window_size = atoi(optarg);
        break;
      case 'c':
        scale = gst_element_factory_make ("videoscale", "scale");
        capfilt = gst_element_factory_make ("capsfilter","capsfilt");
        if(!scale || !capfilt)
        {
          g_printerr ("One element could not be created. Exiting.\n");
          return -1;
        }
        scale_str = optarg;
        GstCaps *scale_caps =  gst_caps_from_string(scale_str);
        g_assert(scale_caps);
        g_object_set (G_OBJECT (capfilt), "caps", scale_caps, NULL);
        gst_caps_unref(scale_caps);
        break;
      case 'm':
        measure = TRUE;
        break;
      case 'd':
        frequency = atoi(optarg);
        break;
      case 'n':
        runs = atoi(optarg) - 1;
        break;
      case 'p':
        sync = FALSE;
        break;
      case 't':
        dynamic = TRUE;
        break;
      case 'i':
        current_file = 0;
        initial_latency = TRUE;
        break;
      default:
        break;
    }
  }

  /* Create gstreamer elements. */
  pipeline  = gst_pipeline_new ("pipeline");
  que       = gst_element_factory_make ("queue",     "que");
  que2      = gst_element_factory_make ("queue",    "que2");
  seam      = gst_element_factory_make ("seamcrop",  "seamcrop");
  decoder   = gst_element_factory_make (decoder_name, "decoder");
  sink      = gst_element_factory_make (sink_name,   "video-output");

  if (!pipeline || !que || !que2 || !seam || !decoder || !sink) 
  {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set seamcrop element properties. */
  g_object_set (G_OBJECT (seam), "retargeting-factor", ret_factor, NULL);
  g_object_set (G_OBJECT (seam), "extend-border", ext_factor, NULL);
  g_object_set (G_OBJECT (seam), "frame-window-size", window_size, NULL);
  g_object_set (G_OBJECT (seam), "measurement", measure, NULL);

  /* Disregard stream synchronization if requested. */
  if(sync == FALSE) 
    g_object_set (G_OBJECT (sink), "sync", sync, NULL); 

  /* Options for the decoder. Sets up pipeline and links accordingly. */
  switch (decoder_option)
  {
    case WANT_FILE:
      source = gst_element_factory_make ("filesrc", "file-source");
      if(!source)
      { 
        g_printerr("Filesrc could not be created. Exiting"); 
        return -1;
      }
      g_object_set (G_OBJECT (source), "location", filename, NULL);

      gst_bin_add_many(GST_BIN(pipeline), source, decoder, NULL);
      gst_element_link (source, decoder);
      break;
    case WANT_URI:
      g_object_set (G_OBJECT (decoder), "uri", filename, NULL); 
      gst_bin_add(GST_BIN(pipeline), decoder);
      break;
    default:
      break;
  }

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* set up pad callback connector. */
  g_signal_connect (decoder, "pad-added", G_CALLBACK (on_pad_added), NULL);

  /* set up videobin. */
  video = gst_bin_new("videobin");

  if(scale) {
    videopad = gst_element_get_static_pad(scale,"sink");
    gst_bin_add_many(GST_BIN(video), scale, capfilt, que, seam, que2, sink, NULL);

    // Link elements.
    gst_element_link(scale,capfilt);
    gst_element_link(capfilt,que);
    gst_element_link(que,seam);
    gst_element_link(seam,que2);
    gst_element_link(que2,sink);

  } else {
    videopad = gst_element_get_static_pad(que,"sink");
    gst_bin_add_many(GST_BIN(video), que, seam, que2, sink, NULL);

    // Link elements.
    gst_element_link(que,seam);
    gst_element_link(seam,que2);
    gst_element_link(que2,sink);
  }

  // Add a ghost pad to the video bin so that it can be used as an element.
  gst_element_add_pad(video,
      gst_ghost_pad_new("sink",videopad));

  gst_object_unref(videopad);

  // Add 'video' as element in 'pipeline'.
  gst_bin_add(GST_BIN(pipeline),video);

  /* Set the pipeline to "playing" state*/
  g_print ("Now playing.\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Timeout to change input resolution */
  if(dynamic)
    g_timeout_add_seconds (frequency, change_input, loop);
  else if(decoder_option == WANT_URI) 
    g_timeout_add_seconds (frequency, force_resolution, loop);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
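on_pad_added is not shown here; a plausible sketch (assumed): link each newly decoded pad to the ghost "sink" pad of the global video bin built above.

static void
on_pad_added (GstElement * element, GstPad * pad, gpointer data)
{
  GstPad *sinkpad = gst_element_get_static_pad (video, "sink");

  if (!GST_PAD_IS_LINKED (sinkpad))
    gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}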
Code Example #10
bool ofGstUtils::startPipeline(){

	bPaused 			= true;
	speed 				= 1.0f;


	if(gst_element_set_state (GST_ELEMENT(gstPipeline), GST_STATE_READY) ==	GST_STATE_CHANGE_FAILURE) {
		ofLogError("ofGstUtils") << "startPipeline(): unable to set pipeline to ready";
		return false;
	}
	if(gst_element_get_state (GST_ELEMENT(gstPipeline), NULL, NULL, 10 * GST_SECOND)==GST_STATE_CHANGE_FAILURE){
		ofLogError("ofGstUtils") << "startPipeline(): unable to get pipeline ready status";
		return false;
	}

	// pause the pipeline
	if(gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PAUSED) ==	GST_STATE_CHANGE_FAILURE) {
		ofLogError("ofGstUtils") << "startPipeline(): unable to pause pipeline";
		return false;
	}

	// wait for paused state to query the duration
	if(!isStream){
		GstState state = GST_STATE_PAUSED;
		if(gst_element_get_state(gstPipeline,&state,NULL,2*GST_SECOND)==GST_STATE_CHANGE_FAILURE){
			ofLogError("ofGstUtils") << "startPipeline(): unable to get pipeline paused state";
			return false;
		}
		bPlaying = true;
		bLoaded = true;
	}

	bus = gst_pipeline_get_bus (GST_PIPELINE(gstPipeline));

	if(bus){
		gst_bus_add_watch (bus, (GstBusFunc) busFunction, this);
	}



	if(isAppSink){
		ofLogVerbose("ofGstUtils") << "startPipeline(): attaching callbacks";
		// set the appsink to not emit signals, we are using callbacks instead
		// and frameByFrame to get buffers by polling instead of callback
		g_object_set (G_OBJECT (gstSink), "emit-signals", FALSE, "sync", !bFrameByFrame, (void*)NULL);
		//gst_app_sink_set_drop(GST_APP_SINK(gstSink),1);
		//gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink),2);

		if(!bFrameByFrame){
			GstAppSinkCallbacks gstCallbacks;
			gstCallbacks.eos = &on_eos_from_source;
			gstCallbacks.new_preroll = &on_new_preroll_from_source;
#if GST_VERSION_MAJOR==0
			gstCallbacks.new_buffer = &on_new_buffer_from_source;
#else
			gstCallbacks.new_sample = &on_new_buffer_from_source;
#endif

			gst_app_sink_set_callbacks(GST_APP_SINK(gstSink), &gstCallbacks, this, NULL);
		}

	}

	if(!isStream){
		setSpeed(1.0);
	}


	return true;
}
Code Example #11
bool ofGstUtils::gstHandleMessage(GstBus * bus, GstMessage * msg){
	if(appsink && appsink->on_message(msg)) return true;

		ofLogVerbose("ofGstUtils") << "gstHandleMessage(): got " << GST_MESSAGE_TYPE_NAME(msg)
			<< " message from " << GST_MESSAGE_SRC_NAME(msg);

	switch (GST_MESSAGE_TYPE (msg)) {

		case GST_MESSAGE_BUFFERING:
			gint pctBuffered;
			gst_message_parse_buffering(msg,&pctBuffered);
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): buffering " << pctBuffered;
			/*if(pctBuffered<100){
				gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
			}else if(!bPaused){
				gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
			}*/
		break;

#if GST_VERSION_MAJOR==0
		case GST_MESSAGE_DURATION:{
			GstFormat format=GST_FORMAT_TIME;
			gst_element_query_duration(gstPipeline,&format,&durationNanos);
		}break;
#else
		case GST_MESSAGE_DURATION_CHANGED:
			gst_element_query_duration(gstPipeline,GST_FORMAT_TIME,&durationNanos);
			break;

#endif

		case GST_MESSAGE_STATE_CHANGED:{
			GstState oldstate, newstate, pendstate;
			gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
			if(isStream && newstate==GST_STATE_PAUSED && !bPlaying ){
				bLoaded = true;
				bPlaying = true;
				if(!bPaused){
					ofLogVerbose("ofGstUtils") << "gstHandleMessage(): setting stream pipeline to play";
					play();
				}
			}

			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): " << GST_MESSAGE_SRC_NAME(msg) << " state changed from "
					<< getName(oldstate) << " to " << getName(newstate) << " (" << getName(pendstate) << ")";
		}break;

		case GST_MESSAGE_ASYNC_DONE:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): async done";
		break;

		case GST_MESSAGE_ERROR: {
			GError *err;
			gchar *debug;
			gst_message_parse_error(msg, &err, &debug);

			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): embedded video playback halted for plugin, module "
				<< gst_element_get_name(GST_MESSAGE_SRC (msg)) << "  reported: " << err->message;

			g_error_free(err);
			g_free(debug);

			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);

		}break;

		case GST_MESSAGE_EOS:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): end of the stream";
			bIsMovieDone = true;

			if(appsink && !isAppSink) appsink->on_eos();

			switch(loopMode){

				case OF_LOOP_NORMAL:{
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
#if GST_VERSION_MAJOR==0
					gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
#else
					gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
#endif
					if(!gst_element_seek(GST_ELEMENT(gstPipeline),
										speed,
										format,
										flags,
										GST_SEEK_TYPE_SET,
										0,
										GST_SEEK_TYPE_SET,
										durationNanos)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				case OF_LOOP_PALINDROME:{
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
#if GST_VERSION_MAJOR==0
					gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
#else
					gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
#endif
					float loopSpeed;
					if(pos>0)
						loopSpeed=-speed;
					else
						loopSpeed=speed;
					if(!gst_element_seek(GST_ELEMENT(gstPipeline),
										loopSpeed,
										GST_FORMAT_UNDEFINED,
										flags,
										GST_SEEK_TYPE_NONE,
										0,
										GST_SEEK_TYPE_NONE,
										0)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				default:
				break;
			}

		break;

		default:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): unhandled message from " << GST_MESSAGE_SRC_NAME(msg);
		break;
	}
	return true;
}
Code Example #12
File: eplayer4.c Project: Lexus34/tdt-arp
int main(int argc,char* argv[]) {
    int showInfos = 0, noinput = 0;
    char file[1025] = {""};
    int speedmap = 0;
    gdouble speed = 1.0;
    printf("%s >\n", __FILE__);

    if (argc < 2)
    {
        printf("give me a filename please\n");
        exit(1);
    }

    int n = snprintf(file, sizeof(file), "%s", argv[1]);
    if (n < 0 || (size_t) n >= sizeof(file))
    {
        printf("URL must not exceed %zu characters!\n", sizeof(file) - 1);
        exit(1);
    }

    printf ("File=%s\n", file);

    gst_init(&argc, &argv);

    GstElement *m_gst_playbin;
    gchar *uri;
    uri = g_filename_to_uri(file, NULL, NULL);

    m_gst_playbin = gst_element_factory_make("playbin2", "playbin");

    printf ("URI=%s\n", uri);

    g_object_set (G_OBJECT (m_gst_playbin), "uri", uri, NULL);
    int flags = 0x47; // ( GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_TEXT );
    g_object_set (G_OBJECT (m_gst_playbin), "flags", flags, NULL);
    g_free(uri);

    //GstElement *subsink = gst_element_factory_make("appsink", "subtitle_sink");

    //gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin)), gstBusSyncHandler, this);

    gst_element_set_state (m_gst_playbin, GST_STATE_PLAYING);

        while(1) {
            int Key = 0;

            if(kbhit() && noinput == 0)
                Key = getchar();

            if(Key == 0 || Key == 0xA)
                continue;
            
            switch (Key) {
            case 'q': //STOP
                gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
                break;

            case 'c': //CONTINUE
                gst_element_set_state (m_gst_playbin, GST_STATE_PLAYING);
                
                speed = 1.0;
                printf("Continue with speed %f\n", speed);
                gst_element_seek (m_gst_playbin, speed, GST_FORMAT_TIME, GST_SEEK_FLAG_NONE, 
                    GST_SEEK_TYPE_NONE, 0, 
                    GST_SEEK_TYPE_NONE, -1);
                
                break;

            case 'p': //PAUSE
                gst_element_set_state(m_gst_playbin, GST_STATE_PAUSED);
                break;

            case 'k': {
                int Key2 = getchar() - 48;
                double sec=0.0;

                switch (Key2) {
                    case 1: sec=-15.0;break;
                    case 4: sec=-60.0;break;
                    case 7: sec=-300.0;break;
                    case 3: sec= 15.0;break;
                    case 6: sec= 60.0;break;
                    case 9: sec= 300.0;break;
                }

                printf("seconds %d \n", Key2);
                gint64 time_nanoseconds;
                gint64 pos;
                GstFormat fmt = GST_FORMAT_TIME;
                gst_element_query_position(m_gst_playbin, &fmt, &pos);

                time_nanoseconds = pos + (sec * 1000000000);
                if (time_nanoseconds < 0) time_nanoseconds = 0;


                double seekTo = 0;
                seekTo = time_nanoseconds / 1000000000.0;
                printf("SeekTo = %02d:%02d:%02d (%.4f sec)\n", (int)((seekTo/60)/60)%60, (int)(seekTo/60)%60, (int)seekTo%60, seekTo);

                gst_element_seek (m_gst_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                    GST_SEEK_TYPE_SET, time_nanoseconds,
                    GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
                break;
            }

            case 'l': {
                double length = 0;
                GstFormat fmt = GST_FORMAT_TIME; //Returns time in nanosecs
                gint64 len;
                gst_element_query_duration(m_gst_playbin, &fmt, &len);
                length = len / 1000000000.0;
                printf("Length = %02d:%02d:%02d (%.4f sec)\n", (int)((length/60)/60)%60, (int)(length/60)%60, (int)length%60, length);
                break;
            }
            case 'j': {
                double sec = 0;
                GstFormat fmt = GST_FORMAT_TIME; //Returns time in nanosecs
                gint64 pos;
                gst_element_query_position(m_gst_playbin, &fmt, &pos);
                sec = pos / 1000000000.0;
                printf("Pts = %02d:%02d:%02d (%.4f sec)\n", (int)((sec/60)/60)%60, (int)(sec/60)%60, (int)sec%60, sec);
                break;
            }

            case 'f':
            {
                if (speed < 1.0)
                    speed = 1.0;
                    
                speed++;

                if (speed > 4.0)
                    speed = 1.0;
                
                printf("FastForward with speed %f\n", speed);
                gst_element_seek (m_gst_playbin, speed, GST_FORMAT_TIME, GST_SEEK_FLAG_NONE, 
                    GST_SEEK_TYPE_NONE, 0, 
                    GST_SEEK_TYPE_NONE, -1);
                
                break;
            }

             case 'b':
            {
                if (speed >= 1.0)
                    speed = 0.0;
                   
                speed--;
                   
                if (speed < -4.0)
                    speed = -1.0;
                
                printf("Reverse with speed %f\n", speed);
                gst_element_seek (m_gst_playbin, speed, GST_FORMAT_TIME, GST_SEEK_FLAG_NONE, 
                    GST_SEEK_TYPE_NONE, 0, 
                    GST_SEEK_TYPE_NONE, -1);
                
                break;
            }

            case 'i':
            {
                break;
            }

            default:
            {
                printf("Control: %x\n", Key);
                printf("al:       List audio tracks\n");
                printf("ac:       List current audio track\n");
                printf("a[id]     Select audio track\n");
                printf("sl:       List subtitles\n");
                printf("sc:       List current subtitle\n");
                printf("s[id]     Select subtitles\n");
                printf("q:        Stop\n");
                printf("c:        Continue\n");
                printf("p:        Pause\n");
                printf("f:        Increase speed (Fast forward) (stepwise)\n");
                printf("b:        Decrease speed (Fast reverse) (stepwise)\n");
                printf("l:        Print duration\n");
                printf("j:        Print current PTS\n");
                printf("k[1,4,7]: Jump back [15,60,300] seconds\n");
                printf("k[3,6,9]: Jump forward [15,60,300] seconds\n");
                printf("i:        Print Info\n");
                break;
            }
        }
    }

    //printOutputCapabilities();

    exit(0);
}
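kbhit() is not POSIX, so the project presumably carries its own. A common stand-in using select() with a zero timeout (a sketch; it also assumes stdin has been put into non-canonical mode so getchar() returns per keypress):

#include <sys/select.h>
#include <unistd.h>

static int kbhit (void)
{
    struct timeval tv = { 0, 0 };
    fd_set fds;

    FD_ZERO (&fds);
    FD_SET (STDIN_FILENO, &fds);
    /* true if stdin is readable right now */
    return select (STDIN_FILENO + 1, &fds, NULL, NULL, &tv) > 0;
}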
Code Example #13
File: sprinkle2.c Project: ChinnaSuhas/ossbuild
int
main (int argc, char *argv[])
{
  GstBus *bus;
  GstElement *filter, *convert, *sink;
  GstCaps *caps;
  gboolean res;
  SprinkleState *state;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, TRUE);

  pipeline = gst_pipeline_new ("pipeline");

  /* add the fixed part to the pipeline. Remember that we need a capsfilter
   * after adder so that multiple sources are not racing to negotiate
   * a format */
  adder = gst_element_factory_make ("adder", "adder");
  filter = gst_element_factory_make ("capsfilter", "filter");
  convert = gst_element_factory_make ("audioconvert", "convert");
  sink = gst_element_factory_make ("autoaudiosink", "sink");

  caps = gst_caps_new_simple ("audio/x-raw-int",
      "endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
      "channels", G_TYPE_INT, 2,
      "width", G_TYPE_INT, 16,
      "depth", G_TYPE_INT, 16,
      "rate", G_TYPE_INT, 44100, "signed", G_TYPE_BOOLEAN, TRUE, NULL);
  g_object_set (filter, "caps", caps, NULL);
  gst_caps_unref (caps);

  gst_bin_add_many (GST_BIN (pipeline), adder, filter, convert, sink, NULL);

  res = gst_element_link_many (adder, filter, convert, sink, NULL);
  g_assert (res);

  /* setup message handling */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH);
  g_signal_connect (bus, "message::error", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::warning", (GCallback) message_received,
      pipeline);
  g_signal_connect (bus, "message::eos", (GCallback) eos_message_received,
      pipeline);

  /* we set the pipeline to PLAYING, the pipeline will not yet preroll because
   * there is no source providing data for it yet */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* and add the function that modifies the pipeline every 100ms */
  state = create_state ();
  g_timeout_add (100, (GSourceFunc) do_sprinkle, state);

  /* go to main loop */
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  free_state (state);
  gst_object_unref (bus);
  gst_object_unref (pipeline);

  return 0;
}
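create_state() and do_sprinkle() are not shown; the capsfilter above pins the format so source branches can come and go without renegotiation races. A hypothetical sketch of the kind of branch do_sprinkle() would add, using the snippet's pipeline and adder globals and the 0.10-era adder request-pad template "sink%d":

static void
add_tone (gdouble freq)
{
  GstElement *src = gst_element_factory_make ("audiotestsrc", NULL);
  GstPad *srcpad, *sinkpad;

  g_object_set (src, "freq", freq, NULL);
  gst_bin_add (GST_BIN (pipeline), src);
  srcpad = gst_element_get_static_pad (src, "src");
  sinkpad = gst_element_get_request_pad (adder, "sink%d");
  gst_pad_link (srcpad, sinkpad);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);
  gst_element_set_state (src, GST_STATE_PLAYING);
}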
Code Example #14
File: cvcap_gstreamer.cpp Project: alejotima/opencv
static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
{
	CvCapture_GStreamer *capture = 0;
	CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

	__BEGIN__;

//	teststreamer(filename);

//	return 0;

	if(!isInited) {
		printf("gst_init\n");
		gst_init (NULL, NULL);

// according to the documentation this is the way to register a plugin now
// unfortunately, it has not propagated into my distribution yet...
// 		gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
// 			"opencv-appsink", "Element application sink",
// 			"0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
// 			"http://opencvlibrary.sourceforge.net/");

		isInited = true;
	}

	const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
//	printf("entered capturecreator %s\n", sourcetypes[type]);

	GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
	if(!source)
		return 0;

	if(type == CV_CAP_GSTREAMER_FILE)
		g_object_set(G_OBJECT(source), "location", filename, NULL);

	GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);

	GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
//	gst_caps_unref(caps);
	gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
//	g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);

	GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);

	GstElement *pipeline = gst_pipeline_new (NULL);

	gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);

//	printf("added many\n");

	switch(type) {
	case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
		caps = gst_caps_new_simple("video/x-raw-rgb",
					   "width", G_TYPE_INT, 640,
					   "height", G_TYPE_INT, 480,
					   "framerate", GST_TYPE_FRACTION, 30, 1,
					   NULL);
		if(!gst_element_link_filtered(source, decodebin, caps)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		gst_caps_unref(caps);
		break;
	case CV_CAP_GSTREAMER_V4L:
	case CV_CAP_GSTREAMER_1394:
	case CV_CAP_GSTREAMER_FILE:
		if(!gst_element_link(source, decodebin)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		break;
	}

	if(!gst_element_link(colour, sink)) {
		CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("linked, pausing\n");

	if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
	   GST_STATE_CHANGE_FAILURE) {
		CV_WARN("GStreamer: unable to set pipeline to paused\n");
//		icvHandleMessage(capture);
//		cvReleaseCapture((CvCapture **)(void *)&capture);
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("state now paused\n");

	// construct capture struct
	capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
	memset(capture, 0, sizeof(CvCapture_GStreamer));
	capture->type = type;
	capture->pipeline = pipeline;
	capture->source = source;
	capture->decodebin = decodebin;
	capture->colour = colour;
	capture->appsink = sink;

	icvHandleMessage(capture);

	OPENCV_ASSERT(capture,
                      "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");

//	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
//	printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));

	__END__;

	return capture;
}
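icvNewPad is not shown; its probable shape (an assumption), matching decodebin's 0.10 "new-decoded-pad" signature and the colour element passed as user data above:

static void
icvNewPad(GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
	GstElement *colour = (GstElement *)data;
	GstPad *sinkpad = gst_element_get_static_pad(colour, "sink");

	if(!GST_PAD_IS_LINKED(sinkpad))
		gst_pad_link(pad, sinkpad);
	gst_object_unref(sinkpad);
}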
Code Example #15
/* build a pipeline equivalent to:
 *
 * gst-launch -v rtpbin name=rtpbin \
 *    $AUDIO_SRC ! audioconvert ! audioresample ! $AUDIO_ENC ! $AUDIO_PAY ! rtpbin.send_rtp_sink_0  \
 *           rtpbin.send_rtp_src_0 ! udpsink port=5002 host=$DEST                      \
 *           rtpbin.send_rtcp_src_0 ! udpsink port=5003 host=$DEST sync=false async=false \
 *        udpsrc port=5007 ! rtpbin.recv_rtcp_sink_0
 */
int
main (int argc, char *argv[])
{
  GstElement *audiosrc, *audioconv, *audiores, *audioenc, *audiopay;
  GstElement *rtpbin, *rtpsink, *rtcpsink, *rtcpsrc;
  GstElement *pipeline;
  GMainLoop *loop;
  GstPad *srcpad, *sinkpad;

  /* always init first */
  gst_init (&argc, &argv);

  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);

  /* the audio capture and format conversion */
  audiosrc = gst_element_factory_make (AUDIO_SRC, "audiosrc");
  g_assert (audiosrc);
  audioconv = gst_element_factory_make ("audioconvert", "audioconv");
  g_assert (audioconv);
  audiores = gst_element_factory_make ("audioresample", "audiores");
  g_assert (audiores);
  /* the encoding and payloading */
  audioenc = gst_element_factory_make (AUDIO_ENC, "audioenc");
  g_assert (audioenc);
  audiopay = gst_element_factory_make (AUDIO_PAY, "audiopay");
  g_assert (audiopay);

  /* add capture and payloading to the pipeline and link */
  gst_bin_add_many (GST_BIN (pipeline), audiosrc, audioconv, audiores,
      audioenc, audiopay, NULL);

  if (!gst_element_link_many (audiosrc, audioconv, audiores, audioenc,
          audiopay, NULL)) {
    g_error ("Failed to link audiosrc, audioconv, audioresample, "
        "audio encoder and audio payloader");
  }

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
  g_assert (rtpbin);

  gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* the udp sinks and source we will use for RTP and RTCP */
  rtpsink = gst_element_factory_make ("udpsink", "rtpsink");
  g_assert (rtpsink);
  g_object_set (rtpsink, "port", 5002, "host", DEST_HOST, NULL);

  rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (rtcpsink);
  g_object_set (rtcpsink, "port", 5003, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL);

  rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc");
  g_assert (rtcpsrc);
  g_object_set (rtcpsrc, "port", 5007, NULL);

  gst_bin_add_many (GST_BIN (pipeline), rtpsink, rtcpsink, rtcpsrc, NULL);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */
  sinkpad = gst_element_get_request_pad (rtpbin, "send_rtp_sink_0");
  srcpad = gst_element_get_static_pad (audiopay, "src");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link audio payloader to rtpbin");
  gst_object_unref (srcpad);

  /* get the RTP srcpad that was created when we requested the sinkpad above and
   * link it to the rtpsink sinkpad*/
  srcpad = gst_element_get_static_pad (rtpbin, "send_rtp_src_0");
  sinkpad = gst_element_get_static_pad (rtpsink, "sink");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtpbin to rtpsink");
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* get an RTCP srcpad for sending RTCP to the receiver */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  sinkpad = gst_element_get_static_pad (rtcpsink, "sink");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtpbin to rtcpsink");
  gst_object_unref (sinkpad);

  /* we also want to receive RTCP, request an RTCP sinkpad for session 0 and
   * link it to the srcpad of the udpsrc for RTCP */
  srcpad = gst_element_get_static_pad (rtcpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK)
    g_error ("Failed to link rtcpsrc to rtpbin");
  gst_object_unref (srcpad);

  /* set the pipeline to playing */
  g_print ("starting sender pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* print stats every second */
  g_timeout_add_seconds (1, (GSourceFunc) print_stats, rtpbin);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping sender pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  return 0;
}
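print_stats is not shown. A sketch of the usual implementation (assumed): fetch rtpbin's internal session 0 through the "get-internal-session" action signal and dump its "stats" structure; returning TRUE keeps the 1-second timeout installed.

static gboolean
print_stats (GstElement * rtpbin)
{
  GObject *session = NULL;

  g_signal_emit_by_name (rtpbin, "get-internal-session", 0, &session);
  if (session) {
    GstStructure *stats = NULL;

    g_object_get (session, "stats", &stats, NULL);
    if (stats) {
      gchar *str = gst_structure_to_string (stats);
      g_print ("session stats: %s\n", str);
      g_free (str);
      gst_structure_free (stats);
    }
    g_object_unref (session);
  }
  return TRUE;
}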
Code Example #16
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    close();
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    if(!isInited) {
//        printf("gst_init\n");
        gst_init (NULL, NULL);

//        gst_debug_set_active(TRUE);
//        gst_debug_set_colored(TRUE);
//        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        isInited = true;
    }
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    if(type != CV_CAP_GSTREAMER_FILE) {
        close();
        return false;
    }

    if(!gst_uri_is_valid(filename)) {
        uri = realpath(filename, NULL);
        stream=false;
        if(uri) {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(!uri) {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        } else {
            GError *err = NULL;
            //uridecodebin = gst_parse_bin_from_description(filename, FALSE, &err);
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                CV_WARN("GStreamer: Error opening bin\n");
                close();
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    if(!uridecodebin) {
        uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
        g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        if(!uridecodebin) {
            CV_WARN("GStreamer: Failed to create uridecodebin\n");
            close();
            return false;
        }
    }

    if(manualpipeline) {
        GstIterator *it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }

        pipeline = uridecodebin;
    } else {
        pipeline = gst_pipeline_new (NULL);

        color = gst_element_factory_make("ffmpegcolorspace", NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);

    {
    GstCaps* caps;
    caps = gst_caps_new_simple("video/x-raw-rgb",
			       "red_mask",   G_TYPE_INT, 0x0000FF,
			       "green_mask", G_TYPE_INT, 0x00FF00,
			       "blue_mask",  G_TYPE_INT, 0xFF0000,
			       NULL);
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
       GST_STATE_CHANGE_FAILURE) {
        CV_WARN("GStreamer: unable to set pipeline to ready\n");
        gst_object_unref(pipeline);
        return false;
    }

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
       GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        CV_WARN("GStreamer: unable to set pipeline to playing\n");
        gst_object_unref(pipeline);
        return false;
    }



    handleMessage();

    __END__;

    return true;
}
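newPad is not shown; a plausible version (assumed), matching uridecodebin's "pad-added" signature and linking each new pad to the colorspace converter passed as user data:

static void
newPad(GstElement *uridecodebin, GstPad *pad, gpointer data)
{
    GstElement *color = (GstElement *)data;
    GstPad *sinkpad = gst_element_get_static_pad(color, "sink");

    if(!GST_PAD_IS_LINKED(sinkpad))
        gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
}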
Code Example #17
File: gstswitchsink.c Project: JJCG/gst-plugins-good
static gboolean
gst_switch_commit_new_kid (GstSwitchSink * sink)
{
  GstPad *targetpad;
  GstState kid_state;
  GstElement *new_kid, *old_kid;
  gboolean is_fakesink = FALSE;
  GstBus *bus;

  /* need locking around member accesses */
  GST_OBJECT_LOCK (sink);
  /* If we're currently changing state, set the child to the next state
   * we're transitioning too, rather than our current state which is 
   * about to change */
  if (GST_STATE_NEXT (sink) != GST_STATE_VOID_PENDING)
    kid_state = GST_STATE_NEXT (sink);
  else
    kid_state = GST_STATE (sink);

  new_kid = sink->new_kid;
  sink->new_kid = NULL;
  GST_OBJECT_UNLOCK (sink);

  /* Fakesink by default if NULL is passed as the new child */
  if (new_kid == NULL) {
    GST_DEBUG_OBJECT (sink, "Replacing kid with fakesink");
    new_kid = gst_element_factory_make ("fakesink", "testsink");
    if (new_kid == NULL) {
      GST_ERROR_OBJECT (sink, "Failed to create fakesink");
      return FALSE;
    }
    /* Add a reference, as it would if the element came from sink->new_kid */
    gst_object_ref (new_kid);
    g_object_set (new_kid, "sync", TRUE, NULL);
    is_fakesink = TRUE;
  } else {
    GST_DEBUG_OBJECT (sink, "Setting new kid");
  }

  /* set temporary bus of our own to catch error messages from the child
   * (could we just set our own bus on it, or would the state change messages
   * from the not-yet-added element confuse the state change algorithm? Let's
   * play it safe for now) */
  bus = gst_bus_new ();
  gst_element_set_bus (new_kid, bus);
  gst_object_unref (bus);

  if (gst_element_set_state (new_kid, kid_state) == GST_STATE_CHANGE_FAILURE) {
    GstMessage *msg;

    /* check if child posted an error message and if so re-post it on our bus
     * so that the application gets to see a decent error and not our generic
     * fallback error message which is completely indecipherable to the user */
    msg = gst_bus_pop_filtered (GST_ELEMENT_BUS (new_kid), GST_MESSAGE_ERROR);
    if (msg) {
      GST_INFO_OBJECT (sink, "Forwarding kid error: %" GST_PTR_FORMAT, msg);
      gst_element_post_message (GST_ELEMENT (sink), msg);
    }
    /* FIXME: need a translated error message that tells the user to check
     * her GConf audio/video settings */
    GST_ELEMENT_ERROR (sink, CORE, STATE_CHANGE, (NULL),
        ("Failed to set state on new child."));
    gst_element_set_bus (new_kid, NULL);
    gst_object_unref (new_kid);
    return FALSE;
  }
  gst_element_set_bus (new_kid, NULL);
  gst_bin_add (GST_BIN (sink), new_kid);

  /* Now, replace the existing child */
  GST_OBJECT_LOCK (sink);
  old_kid = sink->kid;
  sink->kid = new_kid;
  /* Mark whether a custom kid or fakesink has been installed */
  sink->have_kid = !is_fakesink;
  GST_OBJECT_UNLOCK (sink);

  /* kill old element */
  if (old_kid) {
    GST_DEBUG_OBJECT (sink, "Removing old kid %" GST_PTR_FORMAT, old_kid);
    gst_element_set_state (old_kid, GST_STATE_NULL);
    gst_bin_remove (GST_BIN (sink), old_kid);
    gst_object_unref (old_kid);
  }

  /* re-attach ghostpad */
  GST_DEBUG_OBJECT (sink, "Creating new ghostpad");
  targetpad = gst_element_get_static_pad (sink->kid, "sink");
  gst_ghost_pad_set_target (GST_GHOST_PAD (sink->pad), targetpad);
  gst_object_unref (targetpad);
  GST_DEBUG_OBJECT (sink, "done changing child of switchsink");

  /* FIXME: Push new-segment info and pre-roll buffer(s) into the kid */

  /* Unblock the target pad if necessary */
  if (sink->awaiting_block) {
    gst_pad_set_blocked (sink->pad, FALSE);
    sink->awaiting_block = FALSE;
  }

  return TRUE;
}
Code Example #18
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
        double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    __BEGIN__;
    //actually doesn't support fourcc parameter and encode an avi with jpegenc
    //we need to find a common api between backend to support fourcc for avi
    //but also to choose in a common way codec and container format (ogg,dirac,matroska)
    // check arguments

    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);
    std::map<int,char*>::iterator encit;
    encit=encs.find(fourcc);
    if (encit==encs.end())
        CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer OpenCV backend doesn't actually support this codec.");
    if(!isInited) {
        gst_init (NULL, NULL);
        isInited = true;
    }
    close();
    source=gst_element_factory_make("appsrc",NULL);
    file=gst_element_factory_make("filesink", NULL);
    enc=gst_element_factory_make(encit->second, NULL);
    mux=gst_element_factory_make("avimux", NULL);
    color = gst_element_factory_make("ffmpegcolorspace", NULL);
    if (!enc)
        CV_ERROR( CV_StsUnsupportedFormat, "Your version of Gstreamer doesn't actually support this codec, or a needed plugin is missing.");
    g_object_set(G_OBJECT(file), "location", filename, NULL);
    pipeline = gst_pipeline_new (NULL);
    GstCaps* caps;
    if (is_color) {
        input_pix_fmt=1;
        caps= gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                        frameSize.width,
                                        frameSize.height,
                                        (int) (fps * 1000),
                                        1000,
                                        1,
                                        1);
    }
    else  {
        input_pix_fmt=0;
        caps= gst_caps_new_simple("video/x-raw-gray",
                                  "width", G_TYPE_INT, frameSize.width,
                                  "height", G_TYPE_INT, frameSize.height,
                                  "framerate", GST_TYPE_FRACTION, int(fps),1,
                                  "bpp",G_TYPE_INT,8,
                                  "depth",G_TYPE_INT,8,
                                  NULL);
    }
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    if (fourcc==CV_FOURCC_DEFAULT) {
        /* raw path: enc was never added to the bin, so link without it */
        gst_bin_add_many(GST_BIN(pipeline), source, color, mux, file, NULL);
        if(!gst_element_link_many(source, color, mux, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }
    else {
        gst_bin_add_many(GST_BIN(pipeline), source, color,enc,mux, file, NULL);
        if(!gst_element_link_many(source,color,enc,mux,file,NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }


    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
        GST_STATE_CHANGE_FAILURE) {
            CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }
    __END__;
    return true;
}
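open() only builds the pipeline; frames still have to be pushed into the appsrc afterwards. Below is a minimal sketch of that step under GStreamer 0.10, assuming the BGR caps negotiated above; pushFrame and its parameters are illustrative, not OpenCV's actual writeFrame.

// Illustrative only: feed one BGR frame into the appsrc created in open().
// `source' is the appsrc element; frame_index counts frames pushed so far.
static bool pushFrame(GstElement* source, const IplImage* image,
                      double fps, guint64 frame_index)
{
    int size = image->imageSize;
    GstBuffer* buf = gst_buffer_new_and_alloc(size);

    memcpy(GST_BUFFER_DATA(buf), image->imageData, size);

    // timestamp/duration so avimux can build a sensible index
    GST_BUFFER_TIMESTAMP(buf) =
        gst_util_uint64_scale(frame_index, GST_SECOND, (guint64)fps);
    GST_BUFFER_DURATION(buf) = gst_util_uint64_scale(1, GST_SECOND, (guint64)fps);

    // push_buffer takes ownership of buf
    return gst_app_src_push_buffer(GST_APP_SRC(source), buf) == GST_FLOW_OK;
}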
Code example #19
File: gstServer.c Project: scox/Server
void playGst()
{
  gst_element_set_state(pipeline, GST_STATE_PLAYING);
}
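playGst() discards the GstStateChangeReturn. A hedged companion sketch that at least rejects outright failures (pauseGst is hypothetical; pipeline is the same global):

/* Hypothetical companion to playGst(): report immediate failures. */
gboolean pauseGst()
{
  return gst_element_set_state(pipeline, GST_STATE_PAUSED)
      != GST_STATE_CHANGE_FAILURE;
}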
Code example #20
void
gst_splitmux_part_reader_unprepare (GstSplitMuxPartReader * part)
{
  gst_element_set_state (GST_ELEMENT_CAST (part), GST_STATE_NULL);
}
Code example #21
static gboolean
bus_cb (GstBus *bus, GstMessage *message, RBPlayerGst *mp)
{
	const GstStructure *structure;
	g_return_val_if_fail (mp != NULL, FALSE);

	switch (GST_MESSAGE_TYPE (message)) {
	case GST_MESSAGE_ERROR: {
		char *debug;
		GError *error = NULL;
		GError *sig_error = NULL;
		int code;
		gboolean emit = TRUE;

		gst_message_parse_error (message, &error, &debug);

		/* If we've already got an error, ignore 'internal data flow error'
		 * type messages, as they're too generic to be helpful.
		 */
		if (mp->priv->emitted_error &&
		    error->domain == GST_STREAM_ERROR &&
		    error->code == GST_STREAM_ERROR_FAILED) {
			rb_debug ("Ignoring generic error \"%s\"", error->message);
			emit = FALSE;
		}

		code = rb_gst_error_get_error_code (error);

		if (emit) {
			if (message_from_sink (mp->priv->audio_sink, message)) {
				rb_debug ("got error from sink: %s (%s)", error->message, debug);
				/* Translators: the parameter here is an error message */
				g_set_error (&sig_error,
					     RB_PLAYER_ERROR,
					     code,
					     _("Failed to open output device: %s"),
					     error->message);
			} else {
				rb_debug ("got error from stream: %s (%s)", error->message, debug);
				g_set_error (&sig_error,
					     RB_PLAYER_ERROR,
					     code,
					     "%s",
					     error->message);
			}
			state_change_finished (mp, sig_error);
			mp->priv->emitted_error = TRUE;
			if (mp->priv->playbin_stream_changing) {
				emit_playing_stream_and_tags (mp, TRUE);
			}
			_rb_player_emit_error (RB_PLAYER (mp), mp->priv->stream_data, sig_error);
		}

		/* close if not already closing */
		if (mp->priv->uri != NULL)
			rb_player_close (RB_PLAYER (mp), NULL, NULL);

		g_error_free (error);
		g_free (debug);
		break;
	}

	case GST_MESSAGE_EOS:
		_rb_player_emit_eos (RB_PLAYER (mp), mp->priv->stream_data, FALSE);
		break;

	case GST_MESSAGE_STATE_CHANGED:
		{
			GstState oldstate;
			GstState newstate;
			GstState pending;
			gst_message_parse_state_changed (message, &oldstate, &newstate, &pending);
			if (GST_MESSAGE_SRC (message) == GST_OBJECT (mp->priv->playbin)) {
				if (pending == GST_STATE_VOID_PENDING) {
					rb_debug ("playbin reached state %s", gst_element_state_get_name (newstate));
					state_change_finished (mp, NULL);
				}
			}
			break;
		}

	case GST_MESSAGE_TAG: {
		GstTagList *tags;
		gst_message_parse_tag (message, &tags);

		if (mp->priv->stream_change_pending || mp->priv->playbin_stream_changing) {
			mp->priv->stream_tags = g_list_append (mp->priv->stream_tags, tags);
		} else {
			gst_tag_list_foreach (tags, (GstTagForeachFunc) process_tag, mp);
			gst_tag_list_free (tags);
		}
		break;
	}


	case GST_MESSAGE_BUFFERING: {
		gint progress;

		structure = gst_message_get_structure (message);
		if (!gst_structure_get_int (structure, "buffer-percent", &progress)) {
			g_warning ("Could not get value from BUFFERING message");
			break;
		}

		if (progress >= 100) {
			mp->priv->buffering = FALSE;
			if (mp->priv->playing) {
				rb_debug ("buffering done, setting pipeline back to PLAYING");
				gst_element_set_state (mp->priv->playbin, GST_STATE_PLAYING);
			} else {
				rb_debug ("buffering done, leaving pipeline PAUSED");
			}
		} else if (mp->priv->buffering == FALSE && mp->priv->playing) {

			rb_debug ("buffering - temporarily pausing playback");
			gst_element_set_state (mp->priv->playbin, GST_STATE_PAUSED);
			mp->priv->buffering = TRUE;
		}

		_rb_player_emit_buffering (RB_PLAYER (mp), mp->priv->stream_data, progress);
		break;
	}

	case GST_MESSAGE_APPLICATION:
		structure = gst_message_get_structure (message);
		_rb_player_emit_event (RB_PLAYER (mp), mp->priv->stream_data, gst_structure_get_name (structure), NULL);
		break;

	case GST_MESSAGE_ELEMENT:
		structure = gst_message_get_structure (message);
		if (gst_is_missing_plugin_message (message)) {
			handle_missing_plugin_message (mp, message);
		} else if (mp->priv->playbin_stream_changing &&
			   gst_structure_has_name (structure, "playbin2-stream-changed")) {
			rb_debug ("got playbin2-stream-changed message");
			mp->priv->playbin_stream_changing = FALSE;
			emit_playing_stream_and_tags (mp, TRUE);
		} else if (gst_structure_has_name (structure, "redirect")) {
			const char *uri = gst_structure_get_string (structure, "new-location");
			_rb_player_emit_redirect (RB_PLAYER (mp), mp->priv->stream_data, uri);
		}
		break;

	default:
		break;
	}

	/* emit message signals too, so plugins can process messages */
	gst_bus_async_signal_func (bus, message, NULL);

	return TRUE;
}
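bus_cb() above is an ordinary GstBusFunc; a minimal sketch of how such a watch is typically installed on the playbin's bus (attach_bus_watch is hypothetical; Rhythmbox's real setup code lives elsewhere):

/* Hypothetical setup: run bus_cb for every message on the playbin's bus. */
static void
attach_bus_watch (RBPlayerGst *mp)
{
	GstBus *bus;

	bus = gst_element_get_bus (mp->priv->playbin);
	gst_bus_add_watch (bus, (GstBusFunc) bus_cb, mp);
	gst_object_unref (bus);
}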
Code example #22
void
gst_splitmux_part_reader_deactivate (GstSplitMuxPartReader * reader)
{
  GST_DEBUG_OBJECT (reader, "Deactivating reader");
  gst_element_set_state (GST_ELEMENT_CAST (reader), GST_STATE_PAUSED);
}
Code example #23
static int
run_test (const char *format, ...)
{
    GstStateChangeReturn ret;
    GstElement *pipe, *src, *sink;
    GstBuffer *buf = NULL;
    GstMessage *msg;
    gchar *url;
    va_list args;
    int rc = -1;

    pipe = gst_pipeline_new (NULL);

    src = gst_element_factory_make ("souphttpsrc", NULL);
    fail_unless (src != NULL);

    sink = gst_element_factory_make ("fakesink", NULL);
    fail_unless (sink != NULL);

    gst_bin_add (GST_BIN (pipe), src);
    gst_bin_add (GST_BIN (pipe), sink);
    fail_unless (gst_element_link (src, sink));

    if (http_port == 0) {
        GST_DEBUG ("failed to start soup http server");
    }
    fail_unless (http_port != 0);
    va_start (args, format);
    g_vasprintf (&url, format, args);
    va_end (args);
    fail_unless (url != NULL);
    g_object_set (src, "location", url, NULL);
    g_free (url);

    g_object_set (src, "automatic-redirect", redirect, NULL);
    if (cookies != NULL)
        g_object_set (src, "cookies", cookies, NULL);
    g_object_set (sink, "signal-handoffs", TRUE, NULL);
    g_signal_connect (sink, "preroll-handoff", G_CALLBACK (handoff_cb), &buf);

    if (user_id != NULL)
        g_object_set (src, "user-id", user_id, NULL);
    if (user_pw != NULL)
        g_object_set (src, "user-pw", user_pw, NULL);

    ret = gst_element_set_state (pipe, GST_STATE_PAUSED);
    if (ret != GST_STATE_CHANGE_ASYNC) {
        GST_DEBUG ("failed to start up soup http src, ret = %d", ret);
        goto done;
    }

    gst_element_set_state (pipe, GST_STATE_PLAYING);
    msg = gst_bus_poll (GST_ELEMENT_BUS (pipe),
                        GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1);
    if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
        gchar *debug = NULL;
        GError *err = NULL;

        gst_message_parse_error (msg, &err, &debug);
        GST_INFO ("error: %s", err->message);
        if (g_str_has_suffix (err->message, "Not Found"))
            rc = 404;
        else if (g_str_has_suffix (err->message, "Forbidden"))
            rc = 403;
        else if (g_str_has_suffix (err->message, "Unauthorized"))
            rc = 401;
        else if (g_str_has_suffix (err->message, "Found"))
            rc = 302;
        GST_INFO ("debug: %s", debug);
        g_error_free (err);
        g_free (debug);
        gst_message_unref (msg);
        goto done;
    }
    gst_message_unref (msg);

    /* don't wait for more than 10 seconds */
    ret = gst_element_get_state (pipe, NULL, NULL, 10 * GST_SECOND);
    GST_LOG ("ret = %u", ret);

    if (buf == NULL) {
        /* we want to test the buffer offset, nothing else; if there's a failure
         * it might be for lots of reasons (no network connection, whatever), we're
         * not interested in those */
        GST_DEBUG ("didn't manage to get data within 10 seconds, skipping test");
        goto done;
    }

    GST_DEBUG ("buffer offset = %" G_GUINT64_FORMAT, GST_BUFFER_OFFSET (buf));

    /* first buffer should have a 0 offset */
    fail_unless (GST_BUFFER_OFFSET (buf) == 0);
    gst_buffer_unref (buf);
    rc = 0;

done:

    gst_element_set_state (pipe, GST_STATE_NULL);
    gst_object_unref (pipe);
    return rc;
}
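Callers hand run_test() a printf-style URL template plus arguments; an illustrative sketch of a call site (the paths are hypothetical, only run_test()'s contract matters):

static void
run_basic_checks (void)
{
    /* hypothetical paths; run_test() formats the URL and returns 0 or an HTTP code */
    fail_unless (run_test ("http://127.0.0.1:%u/", http_port) == 0);
    fail_unless (run_test ("http://127.0.0.1:%u/does-not-exist", http_port) == 404);
}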
Code example #24
void VideoHttpBuffer::sendStreamError(const QString &message)
{
    emit streamError(message);
    emit bufferingStopped();
    gst_element_set_state(GST_ELEMENT(m_element), GST_STATE_NULL);
}
Code example #25
bool ofGstUtils::gstHandleMessage(GstBus * bus, GstMessage * msg){
	if(appsink && appsink->on_message(msg)) return true;

	/*ofLogVerbose("ofGstUtils") << "gstHandleMessage(): got " << GST_MESSAGE_TYPE_NAME(msg)
		<< " message from " << GST_MESSAGE_SRC_NAME(msg);*/

	switch (GST_MESSAGE_TYPE (msg)) {

		case GST_MESSAGE_BUFFERING:
			gint pctBuffered;
			gst_message_parse_buffering(msg,&pctBuffered);
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): buffering " << pctBuffered;
			if(pctBuffered<100){
				gst_element_set_state (gstPipeline, GST_STATE_PAUSED);
			}else if(!bPaused){
				gst_element_set_state (gstPipeline, GST_STATE_PLAYING);
			}
		break;

#if GST_VERSION_MAJOR==0
		case GST_MESSAGE_DURATION:{
			GstFormat format=GST_FORMAT_TIME;
			gst_element_query_duration(gstPipeline,&format,&durationNanos);
		}break;
#else
		case GST_MESSAGE_DURATION_CHANGED:
			gst_element_query_duration(gstPipeline,GST_FORMAT_TIME,&durationNanos);
			break;

#endif

		case GST_MESSAGE_STATE_CHANGED:{
			GstState oldstate, newstate, pendstate;
			gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
			if(isStream && newstate==GST_STATE_PAUSED && !bPlaying ){
				bLoaded = true;
				bPlaying = true;
				if(!bPaused){
					//ofLogVerbose("ofGstUtils") << "gstHandleMessage(): setting stream pipeline to play";
					play();
				}
			}

			/*ofLogVerbose("ofGstUtils") << "gstHandleMessage(): " << GST_MESSAGE_SRC_NAME(msg) << " state changed from "
					<< getName(oldstate) << " to " << getName(newstate) << " (" + getName(pendstate) << ")";*/
		}break;

		case GST_MESSAGE_ASYNC_DONE:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): async done";
		break;

		case GST_MESSAGE_ERROR: {
			GError *err;
			gchar *debug;
			gst_message_parse_error(msg, &err, &debug);
			gchar * name = gst_element_get_name(GST_MESSAGE_SRC (msg));

			ofLogError("ofGstUtils") << "gstHandleMessage(): embedded video playback halted for plugin, module "
				<< name << "  reported: " << err->message;

			g_free(name);
			g_error_free(err);
			g_free(debug);

			gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL);

		}break;

		case GST_MESSAGE_EOS:{
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): end of the stream";
			bool isClosing = closing;
			eos_cb();

			if(isClosing){
				busWatchID = 0;
				return false;
			}

			switch(loopMode){

				case OF_LOOP_NORMAL:{
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT);
					if(speed>0){
						if(!gst_element_seek(GST_ELEMENT(gstPipeline),
											speed,
											format,
											flags,
											GST_SEEK_TYPE_SET,
											0,
											GST_SEEK_TYPE_SET,
											-1)) {
							ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
						}
					}else if(speed<0){
						if(!gst_element_seek(GST_ELEMENT(gstPipeline),
											speed,
											format,
											flags,
											GST_SEEK_TYPE_SET,
											0,
											GST_SEEK_TYPE_SET,
											durationNanos-1000000)) {
							ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
						}
					}
				}break;

				case OF_LOOP_PALINDROME:{
					GstFormat format = GST_FORMAT_TIME;
					GstSeekFlags flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH |GST_SEEK_FLAG_KEY_UNIT);
					gint64 pos;
					#if GST_VERSION_MAJOR==0
						gst_element_query_position(GST_ELEMENT(gstPipeline),&format,&pos);
					#else
						gst_element_query_position(GST_ELEMENT(gstPipeline),format,&pos);
					#endif
					float loopSpeed;
					if(pos>0)
						loopSpeed=-speed;
					else
						loopSpeed=speed;
					if(!gst_element_seek(GST_ELEMENT(gstPipeline),
										loopSpeed,
										GST_FORMAT_UNDEFINED,
										flags,
										GST_SEEK_TYPE_NONE,
										0,
										GST_SEEK_TYPE_NONE,
										0)) {
						ofLogWarning("ofGstUtils") << "gstHandleMessage(): unable to seek";
					}
				}break;

				default:
				break;
			}

		}break;
		case GST_MESSAGE_LATENCY:
			gst_bin_recalculate_latency (GST_BIN (getPipeline()));
			break;
		case GST_MESSAGE_REQUEST_STATE:	{
			GstState state;
			gchar *name = gst_object_get_path_string (GST_MESSAGE_SRC (msg));

			gst_message_parse_request_state (msg, &state);
			gst_element_set_state (getPipeline(), state);

			g_free (name);
			break;
		}
		case GST_MESSAGE_HAVE_CONTEXT:{
			GstContext *context;
			const gchar *context_type;
			gchar *context_str;

			gst_message_parse_have_context (msg, &context);

			context_type = gst_context_get_context_type (context);
			context_str = gst_structure_to_string (gst_context_get_structure (context));
			ofLogNotice("ofGstUtils","Got context from element '%s': %s=%s\n",
				GST_ELEMENT_NAME (GST_MESSAGE_SRC (msg)), context_type,
				context_str);
			g_free (context_str);
			gst_context_unref (context);
			break;
		}
		default:
			ofLogVerbose("ofGstUtils") << "gstHandleMessage(): unhandled message from " << GST_MESSAGE_SRC_NAME(msg);
		break;
	}

	return true;
}
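gstHandleMessage() is a C++ method, so hooking it to the bus needs a C trampoline; a minimal sketch (the callback and setup code here are hypothetical, busWatchID is the member cleared in the EOS branch above):

// Hypothetical trampoline: forward bus messages to the ofGstUtils instance.
static gboolean bus_message_cb(GstBus * bus, GstMessage * msg, gpointer data){
	return static_cast<ofGstUtils*>(data)->gstHandleMessage(bus, msg);
}

// during pipeline setup (sketch):
//   GstBus * bus = gst_pipeline_get_bus(GST_PIPELINE(gstPipeline));
//   busWatchID = gst_bus_add_watch(bus, bus_message_cb, this);
//   gst_object_unref(bus);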
Code example #26
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    } else {
      mDecoder->SetMediaSeekable(false);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}
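The sync handler installed just above runs on GStreamer's streaming threads. A sketch of its general shape (this is not Mozilla's actual ErrorCb, whose body is not shown here):

/* Sketch only: sync handlers must not block; typically they just note the
 * error and wake up the decode thread. Signature matches GstBusSyncHandler. */
static GstBusSyncReply
ErrorCbSketch(GstBus* aBus, GstMessage* aMessage, gpointer aUserData)
{
  if (GST_MESSAGE_TYPE(aMessage) == GST_MESSAGE_ERROR) {
    /* e.g. flag the error and signal mGstThreadsMonitor */
  }
  return GST_BUS_PASS;
}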

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  timestamp = gst_segment_to_stream_time(&mAudioSegment,
      GST_FORMAT_TIME, timestamp);

  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}
Code example #27
File: server.cpp Project: P0ppoff/SmartProjector
// FUNCTION: GStreamer pipeline management
void Server::setPipeline()
{
    int nbSender=0;
    QString toLaunch;

    if(pipeline!=NULL)
    {
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
    }
    for (int i = 0; i < clients.size(); i++)
    {
        if(clients[i].isSending)
        {
            nbSender++;
        }
    }
    if(nbSender>0)
    {
        toLaunch="videomixer name=mix";

        int taille_grid; // number of rows/columns in the grid
        taille_grid = sqrt(nbSender); // integer part of the square root
        if (pow(taille_grid, 2) != nbSender) taille_grid++; // round up if not a perfect square

        int y=screen.height()/(taille_grid);
        int x=screen.width()/(taille_grid);


        int nb_client = 0;
        for (int i = 0; i < clients.size(); i++)
        {
            if(clients[i].isSending)
            {
                toLaunch+=" sink_" + QString::number(i);
                toLaunch+="::xpos=" + QString::number((screen.width() / taille_grid) * (nb_client % taille_grid));
                toLaunch+=" sink_" + QString::number(i);
                if(nb_client == 0){
                    toLaunch+="::ypos=0";
                }else{
                    toLaunch+="::ypos=" + QString::number((screen.height() / taille_grid) * ((int)(nb_client / taille_grid)));
                }
                nb_client++;
            }
        }

        
#ifndef __RPI_SERVER__
        toLaunch+=" ! autovideosink sync=false";
#else
        toLaunch+=" ! videoconvert ! ximagesink sync=false";
#endif
        for (int i = 0; i < clients.size(); i++)
        {
            if(clients[i].isSending)
            {
#ifndef __RPI_SERVER__
                toLaunch+=" udpsrc port="+ QString::number(clients[i].port)+" caps = \"application/x-rtp, media=(string)video, "
                                                                            "encoding-name=(string)VP8, payload=(int)96, framerate=(fraction)30/1\" ! rtpvp8depay ! vp8dec ! videoscale "
                                                                            "! video/x-raw , width="+ QString::number(x) +", height="+ QString::number(y) +" ! mix.";
#else
                toLaunch+=" udpsrc port="+ QString::number(clients[i].port)+" caps=\"application/x-rtp, media=video, clock-rate=90000"
                                                                            ", encoding-name=H264, framerate=30/1\" ! rtph264depay ! queue ! h264parse ! queue ! omxh264dec ! queue ! videoscale ! video/x-raw "
                                                                            ", width="+ QString::number(x) +", height="+ QString::number(y) +" ! mix.";
#endif
            }
        }
    }
    else
    {
#ifndef __RPI_SERVER__
        toLaunch="videotestsrc pattern=3 ! textoverlay font-desc=\"Sans 24\" "
                 "text=\"Connect to "+  localIp + ":"+ QString::number(serveur->serverPort()) + "\" shaded-background=true "
                                                                                                "! videoconvert ! video/x-raw , width="+ QString::number(screen.width()) +", height="+ QString::number(screen.height()) + " ! autovideosink";
#else
        toLaunch="videotestsrc pattern=3 ! textoverlay font-desc=\"Sans 24\" "
                 "text=\"Connect to "+  localIp + ":"+ QString::number(serveur->serverPort()) + "\" shaded-background=true "
                                                                                                "! videoconvert ! video/x-raw , width="+ QString::number(screen.width()) +", height="+ QString::number(screen.height()) + " ! ximagesink sync=false";
#endif

    }
    qDebug() << toLaunch;

    err = NULL;
    pipeline = gst_parse_launch(toLaunch.toUtf8(), &err);
    if (err != NULL) {
        qDebug() << "gst_parse_launch error:" << err->message;
        g_error_free(err);
        err = NULL;
    }
    if (pipeline != NULL)
        gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if(nbSender>0)
    {sendToAllConnected("restartSending@"+ QString::number(nbSender) );} // all clients must restart their streams
}
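For illustration, with a single sender on (hypothetical) port 5000 and a 1280x720 screen, the non-RPi branch above assembles a launch description along these lines:

videomixer name=mix sink_0::xpos=0 sink_0::ypos=0 ! autovideosink sync=false
udpsrc port=5000 caps="application/x-rtp, media=(string)video, encoding-name=(string)VP8, payload=(int)96, framerate=(fraction)30/1"
    ! rtpvp8depay ! vp8dec ! videoscale ! video/x-raw, width=1280, height=720 ! mix.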
Code example #28
gint
main (gint argc, gchar ** argv)
{
  GdkWindow *video_window_xwindow;
  GtkWidget *window, *video_window;
  GstElement *pipeline, *src, *sink;
  GstStateChangeReturn sret;
  gulong embed_xid = 0;
  gboolean force_aspect = FALSE, draw_borders = FALSE;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  if (argc) {
    gint arg;
    for (arg = 0; arg < argc; arg++) {
      if (!strcmp (argv[arg], "-a"))
        force_aspect = TRUE;
      else if (!strcmp (argv[arg], "-b"))
        draw_borders = TRUE;
      else if (!strcmp (argv[arg], "-v"))
        verbose = TRUE;
    }
  }

  /* prepare the pipeline */

  pipeline = gst_pipeline_new ("xvoverlay");
  src = gst_element_factory_make ("videotestsrc", NULL);
  sink = gst_element_factory_make ("xvimagesink", NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

  g_object_set (G_OBJECT (sink), "handle-events", FALSE,
      "force-aspect-ratio", force_aspect, "draw-borders", draw_borders, NULL);

  /* prepare the ui */

  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (window_closed), (gpointer) pipeline);
  gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);

  video_window = gtk_drawing_area_new ();
  gtk_widget_set_double_buffered (video_window, FALSE);
  gtk_container_add (GTK_CONTAINER (window), video_window);

  /* show the gui and play */
  gtk_widget_show_all (window);

  /* realize window now so that the video window gets created and we can
   * obtain its XID before the pipeline is started up and the videosink
   * asks for the XID of the window to render onto */
  gtk_widget_realize (window);

  video_window_xwindow = gtk_widget_get_window (video_window);
  embed_xid = GDK_WINDOW_XID (video_window_xwindow);
  if (verbose) {
    g_print ("Window realize: got XID %lu\n", embed_xid);
  }

  /* we know what the video sink is in this case (xvimagesink), so we can
   * just set it directly here now (instead of waiting for a
   * prepare-window-handle element message in a sync bus handler and setting
   * it there) */
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), embed_xid);

  anim_state.overlay = GST_VIDEO_OVERLAY (sink);
  anim_state.widget = video_window;
  anim_state.w = 320;
  anim_state.h = 240;
  anim_state.a = 0.0;
  anim_state.p = (G_PI + G_PI) / 200.0;

  handle_resize_cb (video_window, NULL, sink);
  g_signal_connect (video_window, "configure-event",
      G_CALLBACK (handle_resize_cb), NULL);
  g_signal_connect (video_window, "draw", G_CALLBACK (handle_draw_cb), NULL);

  g_timeout_add (50, (GSourceFunc) animate_render_rect, NULL);

  /* run the pipeline */
  sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (sret == GST_STATE_CHANGE_FAILURE)
    gst_element_set_state (pipeline, GST_STATE_NULL);
  else {
    anim_state.running = TRUE;
    gtk_main ();
  }

  gst_object_unref (pipeline);
  return 0;
}
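The delete-event handler wired up above does the teardown; a sketch consistent with the upstream example (window_closed's body is not shown in this listing, so treat this as an assumption):

/* Sketch: stop the pipeline before quitting the GTK+ main loop. */
static void
window_closed (GtkWidget * widget, GdkEvent * event, gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  gtk_widget_hide (widget);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gtk_main_quit ();
}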
Code example #29
static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *structure;
  gint64 position, length;
  GstFormat format = GST_FORMAT_TIME;
  const GValue *x_value, *y_value;
  gint x, i, y;
  /* select msg */
  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT ||
      !gst_structure_has_name (gst_message_get_structure (message),
          "hand-gesture"))
    return GST_BUS_PASS;

  /* parse msg structure */
  structure = gst_message_get_structure (message);

  /* if PALM gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "palm") == 0) {
    /* media operation - a detected palm pauses media play */
    gst_element_set_state (playbin, GST_STATE_PAUSED);
  }

  /* if FIST gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "fist") == 0) {
    /* print message type and structure name */
    g_print ("%s{{%s}}\n", gst_message_type_get_name (message->type),
        gst_structure_get_name (structure));
    /* print msg structure names&values */
    for (i = 0; i < gst_structure_n_fields (structure); i++) {
      const gchar *name = gst_structure_nth_field_name (structure, i);
      GType type = gst_structure_get_field_type (structure, name);
      const GValue *value = gst_structure_get_value (structure, name);
      type == G_TYPE_STRING ?
          g_print ("-%s[%s]{%s}\n", name, g_type_name (type),
          g_value_get_string (value)) : g_print ("-%s[%s]{%d}\n", name,
          g_type_name (type), g_value_get_uint (value));
    }
    g_print ("\n");

    /* get X,Y positions in frame */
    x_value = gst_structure_get_value (structure, "x");
    x = g_value_get_uint (x_value);
    y_value = gst_structure_get_value (structure, "y");
    y = g_value_get_uint (y_value);

    /* set object volumes [0-10] based on Y */
    g_object_set (G_OBJECT (playbin), "volume", (gdouble) (10 - y / 24), NULL);

    /* seek playback positions */
    gst_element_query_duration (playbin, format, &length);
    /* Width = 320 is specified in caps */
    position = (gint64) length * x / 320;
    gst_element_set_state (playbin, GST_STATE_PAUSED);
    gst_element_seek (GST_ELEMENT (playbin),
        1.0,
        format,
        GST_SEEK_FLAG_FLUSH,
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
    gst_element_set_state (GST_ELEMENT (playbin), GST_STATE_PLAYING);
  }

  gst_message_unref (message);
  return GST_BUS_DROP;
}
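A sync handler like bus_sync_handler() has to be installed before messages start flowing; a minimal sketch for GStreamer 1.x (install_gesture_handler is hypothetical; `pipeline' is the pipeline containing the gesture-detecting element):

/* Hypothetical setup: route element messages through bus_sync_handler. */
static void
install_gesture_handler (GstPipeline * pipeline)
{
  GstBus *bus = gst_pipeline_get_bus (pipeline);

  gst_bus_set_sync_handler (bus,
      (GstBusSyncHandler) bus_sync_handler, pipeline, NULL);
  gst_object_unref (bus);
}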
Code example #30
int
main (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *capsfilter, *equalizer, *spectrum, *audioconvert, *sink;
  GstCaps *caps;
  GstBus *bus;
  GtkWidget *appwindow, *vbox, *hbox, *widget;
  int i;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  bin = gst_pipeline_new ("bin");

  /* White noise */
  src = gst_element_factory_make ("audiotestsrc", "src");
  g_object_set (G_OBJECT (src), "wave", 5, "volume", 0.8, NULL);

  /* Force float32 samples */
  capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");
  caps =
      gst_caps_new_simple ("audio/x-raw-float", "width", G_TYPE_INT, 32, NULL);
  g_object_set (capsfilter, "caps", caps, NULL);

  equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer");
  g_object_set (G_OBJECT (equalizer), "num-bands", NBANDS, NULL);

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "message", TRUE, "interval", 500 * GST_MSECOND, NULL);

  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");

  sink = gst_element_factory_make ("autoaudiosink", "sink");

  gst_bin_add_many (GST_BIN (bin), src, capsfilter, equalizer, spectrum,
      audioconvert, sink, NULL);
  if (!gst_element_link_many (src, capsfilter, equalizer, spectrum,
          audioconvert, sink, NULL)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }

  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  appwindow = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (appwindow), "destroy",
      G_CALLBACK (on_window_destroy), NULL);
  vbox = gtk_vbox_new (FALSE, 6);

  drawingarea = gtk_drawing_area_new ();
  gtk_widget_set_size_request (drawingarea, spect_bands, spect_height);
  g_signal_connect (G_OBJECT (drawingarea), "configure-event",
      G_CALLBACK (on_configure_event), (gpointer) spectrum);
  gtk_box_pack_start (GTK_BOX (vbox), drawingarea, TRUE, TRUE, 0);

  hbox = gtk_hbox_new (FALSE, 20);

  for (i = 0; i < NBANDS; i++) {
    GObject *band;
    gdouble freq;
    gdouble bw;
    gdouble gain;
    gchar *label;
    GtkWidget *frame, *scales_hbox;

    band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (equalizer), i);
    g_assert (band != NULL);
    g_object_get (band, "freq", &freq, NULL);
    g_object_get (band, "bandwidth", &bw, NULL);
    g_object_get (band, "gain", &gain, NULL);

    label = g_strdup_printf ("%d Hz", (int) (freq + 0.5));
    frame = gtk_frame_new (label);
    g_free (label);

    scales_hbox = gtk_hbox_new (FALSE, 6);

    widget = gtk_vscale_new_with_range (-24.0, 12.0, 0.5);
    gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE);
    gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP);
    gtk_range_set_value (GTK_RANGE (widget), gain);
    gtk_widget_set_size_request (widget, 25, 150);
    g_signal_connect (G_OBJECT (widget), "value-changed",
        G_CALLBACK (on_gain_changed), (gpointer) band);
    gtk_box_pack_start (GTK_BOX (scales_hbox), widget, FALSE, FALSE, 0);

    widget = gtk_vscale_new_with_range (0.0, 20000.0, 5.0);
    gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE);
    gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP);
    gtk_range_set_value (GTK_RANGE (widget), bw);
    gtk_widget_set_size_request (widget, 25, 150);
    g_signal_connect (G_OBJECT (widget), "value-changed",
        G_CALLBACK (on_bandwidth_changed), (gpointer) band);
    gtk_box_pack_start (GTK_BOX (scales_hbox), widget, TRUE, TRUE, 0);

    widget = gtk_vscale_new_with_range (20.0, 20000.0, 5.0);
    gtk_scale_set_draw_value (GTK_SCALE (widget), TRUE);
    gtk_scale_set_value_pos (GTK_SCALE (widget), GTK_POS_TOP);
    gtk_range_set_value (GTK_RANGE (widget), freq);
    gtk_widget_set_size_request (widget, 25, 150);
    g_signal_connect (G_OBJECT (widget), "value-changed",
        G_CALLBACK (on_freq_changed), (gpointer) band);
    gtk_box_pack_start (GTK_BOX (scales_hbox), widget, TRUE, TRUE, 0);

    gtk_container_add (GTK_CONTAINER (frame), scales_hbox);

    gtk_box_pack_start (GTK_BOX (hbox), frame, TRUE, TRUE, 0);
  }

  gtk_box_pack_start (GTK_BOX (vbox), hbox, TRUE, TRUE, 0);

  gtk_container_add (GTK_CONTAINER (appwindow), vbox);
  gtk_widget_show_all (appwindow);

  gst_element_set_state (bin, GST_STATE_PLAYING);
  gtk_main ();
  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (bin);

  return 0;
}
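The bus watch registered in main() above (message_handler) is where the spectrum data arrives. A sketch of the usual shape of such a handler under GStreamer 0.10; this is an assumption about its body, not the example's actual implementation:

/* Sketch: pull the magnitude list out of spectrum element messages. */
static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "spectrum")) {
      const GValue *magnitudes = gst_structure_get_value (s, "magnitude");
      guint i;

      for (i = 0; i < gst_value_list_get_size (magnitudes); i++) {
        gfloat mag =
            g_value_get_float (gst_value_list_get_value (magnitudes, i));
        /* ...stash mag for the drawing-area redraw... */
        (void) mag;
      }
      gtk_widget_queue_draw (drawingarea);
    }
  }
  return TRUE;
}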