Example #1
void AudioGrabber::init_device()
{

	  GstElement *src, *volume, *audioconvert, *spectrum, *sink;
	  GstBus *bus;
	  GstCaps *caps;


	  gst_init (NULL, NULL);

	  bin = gst_pipeline_new ("bin");

	  src = gst_element_factory_make ("alsasrc", "src");
	  g_object_set (G_OBJECT (src), "device", _deviceName.c_str(), NULL);
	  audioconvert = gst_element_factory_make ("audioconvert", NULL);
	  volume = gst_element_factory_make ("volume", "volume");
	  g_object_set (G_OBJECT (volume), "volume", _volume_gain , NULL);
	  g_assert (audioconvert);

	  spectrum = gst_element_factory_make ("spectrum", "spectrum");
	  g_object_set (G_OBJECT (spectrum), "bands", _num_bands, "threshold", _spectrum_threshold,
	      "post-messages", TRUE, "message-phase", FALSE, NULL);

	  sink = gst_element_factory_make ("fakesink", "sink");
	  g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);

	  gst_bin_add_many (GST_BIN (bin), src, audioconvert, volume, spectrum, sink, NULL);

	  caps = gst_caps_new_simple ("audio/x-raw-int",
	      "rate", G_TYPE_INT, _freq, "channels", G_TYPE_INT, _num_channels,  "depth", G_TYPE_INT, 16,NULL);

	  if (!gst_element_link (src, audioconvert)) {
	    fprintf (stderr, "can't link elements 1\n");
	    exit (1);
	  }

	  if (!gst_element_link_filtered (audioconvert, volume, caps)) {
	    fprintf (stderr, "can't link elements 2\n");
	    exit (1);
	  }

	  if (!gst_element_link_filtered (volume, spectrum, caps)) {
	    fprintf (stderr, "can't link elements 3\n");
	    exit (1);
	  }

	  if (!gst_element_link (spectrum, sink)) {
	    fprintf (stderr, "can't link elements 4\n");
	    exit (1);
	  }

	  gst_caps_unref (caps);

	  bus = gst_element_get_bus (bin);
	  gst_bus_add_watch (bus, message_handler, NULL);
	  gst_object_unref (bus);
}
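The bus watch above refers to a message_handler that is not shown. A minimal sketch, assuming the 0.10-era spectrum element used here, which posts element messages named "spectrum" whose "magnitude" field is a GValue list holding one float per band:

static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "spectrum")) {
      const GValue *magnitudes = gst_structure_get_value (s, "magnitude");
      guint i, n_bands = gst_value_list_get_size (magnitudes);

      /* print the magnitude (in dB) of every band */
      for (i = 0; i < n_bands; i++) {
        const GValue *mag = gst_value_list_get_value (magnitudes, i);
        g_print ("band %u: %.2f dB\n", i, g_value_get_float (mag));
      }
    }
  }
  return TRUE;  /* keep watching the bus */
}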
Example #2
/* Creates a simple bin that plays a background JPEG image or sequence 
*  at 30fps. Should be used as the first input of the video mixer. 
*  Scaling should be set when linked to the mixer element! 
*  bin elements : multifilesrc ! jpegdec ! videoscale ! imagefreeze ! queue
*  A src ghost pad is added as the bin's output. */
GstElement* bkgbin_new(CustomData *data)
{
   GstElement *bkgbin,*bkgsrc,*bkgdec,*bkgscale,*bkgqueue,*bkgfreeze;
   GstCaps *freeze_caps,*scale_caps;
   GstPad *pad,*dec_pad_sink;
   //Create bin, elements, caps and link everything.
   bkgbin=gst_bin_new("bkgbin");
   bkgsrc=gst_element_factory_make("multifilesrc","bkgsrc");
   bkgdec=gst_element_factory_make("jpegdec","bkgdec");
   bkgfreeze=gst_element_factory_make("imagefreeze","bkgfreeze");
   bkgscale=gst_element_factory_make("videoscale","bkgscale");
   bkgqueue=gst_element_factory_make("queue","bkgqueue");
   gst_bin_add_many(GST_BIN(bkgbin),bkgsrc,bkgdec,bkgscale,bkgqueue,bkgfreeze,NULL);
   freeze_caps=gst_caps_new_simple("video/x-raw",
                                "framerate",GST_TYPE_FRACTION,FRAMES_PER_SEC,1,
                                NULL);
   scale_caps=gst_caps_new_simple("video/x-raw",
//                                "format",G_TYPE_STRING,"YUV",
//                                "alpha",G_TYPE_INT,0,
//                                "framerate",GST_TYPE_FRACTION,FRAMES_PER_SEC,1,
                                "width",G_TYPE_INT,CAMERA_RES_WIDTH,
                                "height",G_TYPE_INT,CAMERA_RES_HEIGHT,
                                NULL);
//   gst_element_link(bkgsrc,bkgdec);
   gst_element_link_many(bkgsrc,bkgdec,NULL);
   /* decodebin's src pad is a sometimes pad - it gets created dynamically */
//   g_signal_connect(bkgdec,"pad-added",G_CALLBACK(on_new_decoded_pad),bkgscale);
   gst_element_link(bkgdec,bkgscale);
   gst_element_link_filtered(bkgscale,bkgfreeze,scale_caps);
   gst_element_link_filtered(bkgfreeze,bkgqueue,freeze_caps);
//   gst_element_link_filtered(bkgscale,bkgqueue,scale_caps);
   gst_caps_unref(scale_caps);
   gst_caps_unref(freeze_caps);
   //Create the ghost src pad for the bin.
   pad=gst_element_get_static_pad(bkgqueue,"src");
   gst_element_add_pad(bkgbin,gst_ghost_pad_new("src",pad));
   gst_object_unref(pad);
   
   /* set initial parameters */
   g_object_set(G_OBJECT(bkgsrc),"location",data->config[data->selected_config].background,
          "loop",TRUE,NULL);
//   g_object_set(G_OBJECT(bkgqueue),"leaky",2,NULL);
   
   /* set eos handler function (currently disabled) */
   dec_pad_sink=gst_element_get_static_pad(bkgdec,"sink");
//   gst_pad_set_event_function(dec_pad_sink,eos_callback);
   gst_object_unref(dec_pad_sink); /* drop the extra ref taken on the pad */

   return bkgbin;
}
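A hypothetical usage sketch for the bin above (the videomixer element, its "sink_%u" pad template, and the pipeline variable are assumptions for illustration): request a mixer sink pad and link the bin's ghost "src" pad to it.

GstElement *bkg = bkgbin_new (data);                       /* bin from above */
GstElement *mixer = gst_element_factory_make ("videomixer", "mixer");
GstPad *mix_sink, *bkg_src;

gst_bin_add_many (GST_BIN (pipeline), bkg, mixer, NULL);
mix_sink = gst_element_get_request_pad (mixer, "sink_%u"); /* assumed pad template */
bkg_src = gst_element_get_static_pad (bkg, "src");
gst_pad_link (bkg_src, mix_sink);                          /* background becomes the first input */
gst_object_unref (bkg_src);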
Example #3
int
spectrum_run (int argc, char *argv[])
{
  GstElement *bin;
  GstElement *src, *audioconvert, *spectrum, *sink;
  GstBus *bus;
  GstCaps *caps;
  GMainLoop *loop;

  gst_init (&argc, &argv);

//	g_print("Enter Upper Frequency Bound:");
//	scanf("%d" , &inputfreq);
//	AUDIOFREQ = (inputfreq * 2);
//	g_print("Enter Number of Frequency Bands:");
//	scanf("%d" , &spect_bands);

  bin = gst_pipeline_new ("bin");

  src = gst_element_factory_make ("audiotestsrc", "src");
  g_object_set (G_OBJECT (src), "wave", 0, "freq", 6000.0, NULL);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  g_assert (audioconvert);

  spectrum = gst_element_factory_make ("spectrum", "spectrum");
  g_object_set (G_OBJECT (spectrum), "bands", spect_bands, "threshold", -80,
      "message", TRUE, "message-phase", TRUE, NULL);

  sink = gst_element_factory_make ("fakesink", "sink");
  g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);

  gst_bin_add_many (GST_BIN (bin), src, audioconvert, spectrum, sink, NULL);

  caps = gst_caps_new_simple ("audio/x-raw-int",
      "rate", G_TYPE_INT, AUDIOFREQ, NULL);

  if (!gst_element_link (src, audioconvert) ||
      !gst_element_link_filtered (audioconvert, spectrum, caps) ||
      !gst_element_link (spectrum, sink)) {
    fprintf (stderr, "can't link elements\n");
    exit (1);
  }
  gst_caps_unref (caps);

  bus = gst_element_get_bus (bin);
  gst_bus_add_watch (bus, message_handler, NULL);
  gst_object_unref (bus);

  gst_element_set_state (bin, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);
	
  gst_element_set_state (bin, GST_STATE_NULL);

  gst_object_unref (bin);
	
  return 0;
}
Example #4
void CvCapture_GStreamer::restartPipeline()
{
    CV_FUNCNAME("icvRestartPipeline");

    __BEGIN__;

    printf("restarting pipeline, going to ready\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to set pipeline to ready\n");
        return;
    }

    printf("ready, relinking\n");

    gst_element_unlink(uridecodebin, color);
    gchar *caps_str = gst_caps_to_string(caps);
    printf("filtering with %s\n", caps_str);
    g_free(caps_str); /* gst_caps_to_string allocates a new string */
    gst_element_link_filtered(uridecodebin, color, caps);

    printf("relinked, starting playback\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return;
    }

    printf("state now paused\n");

    __END__;
}
Example #5
gint
main (gint argc, gchar ** argv)
{
  GstCaps *caps;
  GstElement *sink, *identity;
  GstElement *pipeline;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  identity = gst_element_factory_make ("identity", NULL);
  g_assert (identity);
  sink = gst_element_factory_make ("fakesink", NULL);
  g_assert (sink);
  gst_bin_add_many (GST_BIN (pipeline), identity, sink, NULL);
  gst_element_link_filtered (identity, sink,
      gst_caps_new_simple ("audio/x-raw-int", NULL));
  caps = gst_pad_get_caps (gst_element_get_pad (identity, "sink"));
  g_print ("caps:         %s\n", gst_caps_to_string (caps));
  g_assert (!gst_caps_is_any (caps));

  caps = gst_pad_get_allowed_caps (gst_element_get_pad (identity, "sink"));
  g_print ("allowed caps: %s\n", gst_caps_to_string (caps));
  /* get_allowed_caps doesn't mean anything if you aren't connected */
  g_assert (!caps);

  return 0;
}
Example #6
static gboolean link_elements_with_filter (GstElement *element1, GstElement *element2, GstCaps *caps)
{
  gboolean link_ok;

  link_ok = gst_element_link_filtered (element1, element2, caps);

  if (!link_ok) {
    g_warning ("No s'ha pogut linkar element1 i element2 amb filtre demanat");
  }

  return link_ok;
}
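A minimal usage sketch for the helper above; the element variables and caps values are assumptions for illustration.

static void
link_with_stereo_caps (GstElement *conv, GstElement *resample)
{
  GstCaps *caps = gst_caps_new_simple ("audio/x-raw",
      "rate", G_TYPE_INT, 44100,
      "channels", G_TYPE_INT, 2,
      NULL);

  if (link_elements_with_filter (conv, resample, caps))
    g_print ("linked with 44.1 kHz stereo constraint\n");

  gst_caps_unref (caps);  /* gst_element_link_filtered does not take ownership */
}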
Example #7
/*
 * Method: link_filtered(element, caps)
 * element: a Gst::Element object.
 * caps: a Gst::Caps object.
 *
 * Links this element (source) to the provided element (destination), 
 * filtered by the given caps.
 *
 * The method looks for existing pads and request pads that 
 * aren't linked yet. If multiple links are possible, only one 
 * is established.
 *
 * Returns: the destination element, or nil if the link failed.
 */
static VALUE
rg_link_filtered(VALUE self, VALUE other_element, VALUE rcaps)
{
    GstElement *element1, *element2;
    GstCaps *caps;

    element1 = SELF(self);
    element2 = SELF(other_element);
    caps = RGST_CAPS(rcaps);
    return gst_element_link_filtered(element1, element2, caps)
        ? other_element 
        : Qnil;
}
Example #8
video_server_t * video_server_create(config_t *conf)
{
    video_server_t * server = (video_server_t *)malloc(sizeof(video_server_t));


    /* Create gstreamer elements */
    server->pipeline  = gst_pipeline_new("bonecam-video");

    create_input_source(server, conf);
    create_h264_caps(server, conf);

    server->vqueue = gst_element_factory_make("queue", "video-queue");
    server->parser = gst_element_factory_make("h264parse",  "video-parser");
    server->payloader = gst_element_factory_make("rtph264pay", "video-rtppay");

    create_udp_sink(server, conf);


    if (!server->pipeline || !server->source || !server->parser ||
        !server->vqueue || !server->payloader || !server->udpsink)
    {
        fprintf(stderr, "%s: At least one element could not be created.\n",
                __func__);
        free(server);
        return NULL;   /* adding NULL elements to the bin would crash below */
    }

    /* Add elements to pipeline */
    gst_bin_add_many(GST_BIN(server->pipeline),
                     server->source, server->vqueue, server->parser,
                     server->payloader, server->udpsink, NULL);

    /* Link elements */
    gst_element_link_pads(server->source, "vidsrc", server->vqueue, "sink");

    if (!gst_element_link_filtered(server->vqueue,
                                   server->parser,
                                   server->h264caps))
    {
        fprintf(stderr, "%s: Failed to link elements\n", __func__);
    }

    gst_caps_unref(server->h264caps);

    if (!gst_element_link_many(server->parser, server->payloader,
                               server->udpsink, NULL))
        fprintf(stderr, "%s: Failed to link elements\n", __func__);

    /* keep reference to config */
    server->conf = conf;

    return server;
}
Example #9
bool TrackAnalyser::prepare()
{
        GstElement *dec, *conv, *sink, *cutter, *audio, *analysis;
        GstPad *audiopad;
        GstCaps *caps;

        caps = gst_caps_new_simple ("audio/x-raw-int",
                                    "channels", G_TYPE_INT, 2, NULL);

        pipeline = gst_pipeline_new ("pipeline");
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));


        dec = gst_element_factory_make ("decodebin2", "decoder");
        g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_ta), this);
        gst_bin_add (GST_BIN (pipeline), dec);

        audio = gst_bin_new ("audiobin");
        conv = gst_element_factory_make ("audioconvert", "conv");
        audiopad = gst_element_get_static_pad (conv, "sink");
        analysis = gst_element_factory_make ("rganalysis", "analysis");
        cutter = gst_element_factory_make ("cutter", "cutter");
        sink = gst_element_factory_make ("fakesink", "sink");

        g_object_set (analysis, "message", TRUE, NULL);
        g_object_set (analysis, "num-tracks", 1, NULL);
        g_object_set (cutter, "threshold-dB", -25.0, NULL);

        gst_bin_add_many (GST_BIN (audio), conv, analysis, cutter, sink, NULL);
        gst_element_link (conv, analysis);
        gst_element_link_filtered (analysis, cutter, caps);
        gst_element_link (cutter, sink);
        gst_caps_unref (caps); /* the filtered link keeps its own reference */
        gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad));

        gst_bin_add (GST_BIN (pipeline), audio);

        GstElement *l_src;
        l_src = gst_element_factory_make ("filesrc", "localsrc");
        gst_bin_add_many (GST_BIN (pipeline), l_src, NULL);
        gst_element_set_state (l_src, GST_STATE_NULL);
        gst_element_link (l_src, dec);

        gst_object_unref (audiopad);

        gst_bus_set_sync_handler (bus, bus_cb, this);

        return pipeline != NULL;
}
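A minimal sketch of the cb_newpad_ta callback wired up above, assuming the 0.10 decodebin2 "new-decoded-pad" signature and that the audiobin can be looked up by name from the pipeline member:

static void
cb_newpad_ta (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
  TrackAnalyser *self = (TrackAnalyser *) data;
  GstElement *audio = gst_bin_get_by_name (GST_BIN (self->pipeline), "audiobin");
  GstPad *sinkpad = gst_element_get_static_pad (audio, "sink");

  /* only the first decoded audio pad gets connected */
  if (!GST_PAD_IS_LINKED (sinkpad))
    gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
  gst_object_unref (audio);
}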
Example #10
static gboolean
acam_webcam_setup_create_photo_save_bin (acam_webcam_device_s *acam_webcam_device, GError **error)
{
	GstElement *csp_photo_save_bin;

	gboolean ok;
	GstPad  *pad;
	GstCaps *caps;

	/* Create a new bin (photo bin) */
	acam_webcam_device->photo_save_bin = gst_bin_new ("photo_save_bin");

	if ((csp_photo_save_bin = gst_element_factory_make ("ffmpegcolorspace", "csp_photo_save_bin")) == NULL) {
		g_print ("Element not found: ffmpegcolorspace\n");
	}
	if ((acam_webcam_device->photo_sink = gst_element_factory_make ("fakesink", "photo_sink")) == NULL) {
		g_print ("Element not found: fakesink\n");
	}

	if (error != NULL && *error != NULL)
		return FALSE;

	gst_bin_add_many (GST_BIN (acam_webcam_device->photo_save_bin),
	    				csp_photo_save_bin,
						acam_webcam_device->photo_sink, NULL);

	/* Add ghostpad */
	pad = gst_element_get_pad (csp_photo_save_bin, "sink");
	gst_element_add_pad (acam_webcam_device->photo_save_bin, gst_ghost_pad_new ("sink", pad));
	gst_object_unref (GST_OBJECT (pad));

	caps = gst_caps_new_simple ("video/x-raw-rgb",
								"bpp", G_TYPE_INT, 24,
								"depth", G_TYPE_INT, 24,
								NULL);
	ok = gst_element_link_filtered (csp_photo_save_bin, acam_webcam_device->photo_sink, caps);
	gst_caps_unref (caps);

	g_object_set (G_OBJECT (acam_webcam_device->photo_sink), "signal-handoffs", TRUE, NULL);

	if (!ok)
		g_error ("Unable to create photo save pipeline");

	return TRUE;
}
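The photo_sink above enables signal-handoffs, but the handler is connected elsewhere. A sketch of what such a handler could look like; the callback name and the save_photo helper are hypothetical, and the 0.10 buffer macros match the video/x-raw-rgb caps used here.

static void
photo_handoff_cb (GstElement *fakesink, GstBuffer *buffer, GstPad *pad, gpointer data)
{
  acam_webcam_device_s *device = (acam_webcam_device_s *) data;

  /* the bin delivers 24-bit RGB frames, so the raw bytes can be saved directly */
  save_photo (device, GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
}

/* connected with:
 * g_signal_connect (acam_webcam_device->photo_sink, "handoff",
 *     G_CALLBACK (photo_handoff_cb), acam_webcam_device);
 */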
Example #11
int
main (int argc, char *argv[])
{
  GstElement *audiotestsrc, *audioconvert, *level, *fakesink;
  GstElement *pipeline;
  GstCaps *caps;
  GstBus *bus;
  gint watch_id;
  GMainLoop *loop;

  gst_init (&argc, &argv);

  caps = gst_caps_from_string ("audio/x-raw-int,channels=2");

  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);
  audiotestsrc = gst_element_factory_make ("audiotestsrc", NULL);
  g_assert (audiotestsrc);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  g_assert (audioconvert);
  level = gst_element_factory_make ("level", NULL);
  g_assert (level);
  fakesink = gst_element_factory_make ("fakesink", NULL);
  g_assert (fakesink);

  gst_bin_add_many (GST_BIN (pipeline), audiotestsrc, audioconvert, level,
      fakesink, NULL);
  g_assert (gst_element_link (audiotestsrc, audioconvert));
  g_assert (gst_element_link_filtered (audioconvert, level, caps));
  g_assert (gst_element_link (level, fakesink));

  /* make sure we'll get messages */
  g_object_set (G_OBJECT (level), "message", TRUE, NULL);

  bus = gst_element_get_bus (pipeline);
  watch_id = gst_bus_add_watch (bus, message_handler, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  return 0;
}
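A minimal sketch of the message_handler registered above, following the documented level-element message layout: element messages named "level" carry an "rms" GValueArray with one double (in dB) per channel.

static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "level")) {
      const GValue *value = gst_structure_get_value (s, "rms");
      GValueArray *rms = (GValueArray *) g_value_get_boxed (value);
      guint i;

      for (i = 0; i < rms->n_values; i++) {
        const GValue *v = g_value_array_get_nth (rms, i);
        g_print ("channel %u RMS: %.2f dB\n", i, g_value_get_double (v));
      }
    }
  }
  return TRUE;  /* keep the watch installed */
}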
Example #12
void linkSourceAndSink(){
	GstCaps *caps = gst_caps_new_simple (
		"audio/x-raw-int",	     
		"rate",       G_TYPE_INT, 22050,
		"depth",      G_TYPE_INT, 16,
		"endianness", G_TYPE_INT, 4321,
		"channels",   G_TYPE_INT, 1,
		"signed",     G_TYPE_BOOLEAN, TRUE,
		NULL);

	gboolean link_ok = gst_element_link_filtered (source, sink, caps);

	if (!link_ok) {
		g_printerr ("Failed to link source and sink.\n");
		exit(-2);
	}
	
	gst_caps_unref (caps);
}
Example #13
GstElement *
audiotest_bin_src (const gchar * name, guint64 start,
    gint64 duration, guint priority, gboolean intaudio)
{
  GstElement *source = NULL;
  GstElement *identity = NULL;
  GstElement *audiotestsrc = NULL;
  GstElement *audioconvert = NULL;
  GstElement *bin = NULL;
  GstCaps *caps;
  GstPad *srcpad = NULL;

  audiotestsrc = gst_element_factory_make_or_warn ("audiotestsrc", NULL);
  identity = gst_element_factory_make_or_warn ("identity", NULL);
  bin = gst_bin_new (NULL);
  source = new_nle_src (name, start, duration, priority);
  audioconvert = gst_element_factory_make_or_warn ("audioconvert", NULL);

  if (intaudio)
    caps = gst_caps_from_string ("audio/x-raw,format=(string)S16LE");
  else
    caps = gst_caps_from_string ("audio/x-raw,format=(string)F32LE");

  gst_bin_add_many (GST_BIN (bin), audiotestsrc, audioconvert, identity, NULL);
  gst_element_link_pads_full (audiotestsrc, "src", audioconvert, "sink",
      GST_PAD_LINK_CHECK_NOTHING);
  fail_if ((gst_element_link_filtered (audioconvert, identity, caps)) != TRUE);

  gst_caps_unref (caps);

  gst_bin_add (GST_BIN (source), bin);

  srcpad = gst_element_get_static_pad (identity, "src");

  gst_element_add_pad (bin, gst_ghost_pad_new ("src", srcpad));

  gst_object_unref (srcpad);

  return source;
}
Example #14
static int set_links (ServerData *app)
{
  GstCaps *a_caps = NULL;
  int error_flag = 0;
  
  if (!gst_element_link_many (app->muxer, app->sink_buffer, app->sink, NULL))
  {
    fprintf (stderr, "Could not link elements: muxer --> buffer --> sink\n");
    error_flag = 1;
  }

  if (!gst_element_link (app->a_enc_buffer, app->a_encoder))
  {
    fprintf (stderr, "Could not link elements: queue --> audio encoder\n");
    error_flag = 1;
  }

  if (!gst_element_link (app->v_enc_buffer, app->v_encoder))
  {
    fprintf (stderr, "Could not link elements: queue --> video encoder\n");
    error_flag = 1;
  }

  error_flag |= link_encoders_muxer (app); /* don't clobber earlier failures */
  
  a_caps = gst_caps_new_simple ("audio/x-raw",
      "format", G_TYPE_STRING, "F32LE",
      "rate", G_TYPE_INT, 48000,
      NULL);

  if (!gst_element_link_filtered (app->a_filter, app->a_enc_buffer, a_caps))
  {
    fprintf (stderr, "Could not link audio filter and audio enc buffer\n");
    error_flag = 1;
  }

  gst_caps_unref (a_caps); /* the filtered link does not take ownership */

  return error_flag;
}
Example #15
VideoDataOutput::VideoDataOutput(Backend *backend, QObject *parent)
    : QObject(parent),
      MediaNode(backend, VideoSink),
      m_frontend(0)
{
    static int count = 0;
    m_name = "VideoDataOutput" + QString::number(count++);

    m_queue = gst_bin_new(NULL);
    gst_object_ref_sink(GST_OBJECT(m_queue));

    GstElement* sink = gst_element_factory_make("fakesink", NULL);
    GstElement* queue = gst_element_factory_make("queue", NULL);
    GstElement* convert = gst_element_factory_make("videoconvert", NULL);

    g_signal_connect(sink, "handoff", G_CALLBACK(processBuffer), this);
    g_object_set(G_OBJECT(sink), "signal-handoffs", true, NULL);

        // Save ourselves a metric crapton of work by simply requesting
        // a format native to Qt.
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                        "format", G_TYPE_STRING, GST_VIDEO_NE(RGB),
                                        NULL);

    gst_bin_add_many(GST_BIN(m_queue), sink, convert, queue, NULL);
    gst_element_link(queue, convert);
    gst_element_link_filtered(convert, sink, caps);
    gst_caps_unref(caps);

    GstPad *inputpad = gst_element_get_static_pad(queue, "sink");
    gst_element_add_pad(m_queue, gst_ghost_pad_new("sink", inputpad));
    gst_object_unref(inputpad);

    g_object_set(G_OBJECT(sink), "sync", true, NULL);

    m_isValid = true;
}
Example #16
static gboolean link_elements_with_filter (GstElement *element1, GstElement *element2) {
    gboolean link_ok;
    GstCaps *caps;

    caps = gst_caps_new_simple (
#if GST_VERSION_MAJOR == (0)
            "audio/x-raw-float",
#else
            "audio/x-raw",
#endif
            "channels", G_TYPE_INT, 2,
            NULL);

    link_ok = gst_element_link_filtered (element1, element2, caps);
    gst_caps_unref (caps);

    if (!link_ok) {
        g_warning ("Failed to link %s to %s",
                GST_ELEMENT_NAME (element1),
                GST_ELEMENT_NAME (element2));
    }

    return link_ok;
}
Example #17
int
main (int argc, char *argv[])
{
  GtkWidget *window, *window_control;
  GtkWidget *button_state_null, *button_state_ready;
  GtkWidget *button_state_paused, *button_state_playing;
  GtkWidget *grid, *area;
  GstElement *pipeline;
  GstElement *videosrc, *videosink;
  GstStateChangeReturn ret;
  GstCaps *caps;
  GstBus *bus;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");

  //window that contains an area where the video is drawn
  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 640, 480);
  gtk_window_move (GTK_WINDOW (window), 300, 10);
  gtk_window_set_title (GTK_WINDOW (window), "gtkgstwidget");

  //window to control the states
  window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
  gtk_window_move (GTK_WINDOW (window_control), 10, 10);
  grid = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (window_control), grid);

  //control state null
  button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
  g_signal_connect (G_OBJECT (button_state_null), "clicked",
      G_CALLBACK (button_state_null_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_null, 0, 1, 1, 1);
  gtk_widget_show (button_state_null);

  //control state ready
  button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_ready, 0, 2, 1, 1);
  gtk_widget_show (button_state_ready);

  //control state paused
  button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_paused, 0, 3, 1, 1);
  gtk_widget_show (button_state_paused);

  //control state playing
  button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_playing, 0, 4, 1, 1);
  gtk_widget_show (button_state_playing);

  gtk_widget_show (grid);
  gtk_widget_show (window_control);

  g_signal_connect (G_OBJECT (window), "delete-event", G_CALLBACK (destroy_cb),
      pipeline);

  //configure the pipeline
  videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
  videosink = gst_element_factory_make ("gtksink", "gtksink");

  g_object_get (videosink, "widget", &area, NULL);
  gtk_container_add (GTK_CONTAINER (window), area);
  g_object_unref (area);

  gtk_widget_realize (area);

  caps = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, 640,
      "height", G_TYPE_INT, 480, "format", G_TYPE_STRING, "BGRA", NULL);

  gst_bin_add_many (GST_BIN (pipeline), videosrc, videosink, NULL);

  if (!gst_element_link_filtered (videosrc, videosink, caps)) {
    gst_caps_unref (caps);
    g_warning ("Failed to link videosrc to glfiltercube!\n");
    return -1;
  }
  gst_caps_unref (caps);

  //set window id on this event
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb), pipeline);
  gst_object_unref (bus);

  //start
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("Failed to start up pipeline!\n");
    return -1;
  }

  gtk_widget_show_all (window);

  gtk_main ();

  gst_deinit ();

  return 0;
}
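Minimal sketches of the button callbacks connected above, assuming each one simply requests the matching pipeline state (the pipeline is passed as user data in the g_signal_connect calls):

static void
button_state_null_cb (GtkButton * button, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_NULL);
}

static void
button_state_playing_cb (GtkButton * button, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}

/* button_state_ready_cb and button_state_paused_cb follow the same pattern
 * with GST_STATE_READY and GST_STATE_PAUSED. */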
Example #18
QString Chromaprinter::CreateFingerprint() {
  Q_ASSERT(QThread::currentThread() != qApp->thread());

  buffer_.open(QIODevice::WriteOnly);

  GMainContext* context = g_main_context_new();
  g_main_context_push_thread_default(context);
  event_loop_ = g_main_loop_new(context, FALSE);

  pipeline_ = gst_pipeline_new("pipeline");
  GstElement* src = CreateElement("filesrc", pipeline_);
  GstElement* decode = CreateElement("decodebin2", pipeline_);
  GstElement* convert = CreateElement("audioconvert", pipeline_);
  GstElement* resample = CreateElement("audioresample", pipeline_);
  GstElement* sink = CreateElement("appsink", pipeline_);

  if (!src || !decode || !convert || !resample || !sink) {
    return QString();
  }

  convert_element_ = convert;

  // Connect the elements
  gst_element_link_many(src, decode, nullptr);
  gst_element_link_many(convert, resample, nullptr);

  // Chromaprint expects mono floats at a sample rate of 11025Hz.
  GstCaps* caps = gst_caps_new_simple(
      "audio/x-raw-int", "width", G_TYPE_INT, 16, "channels", G_TYPE_INT,
      kDecodeChannels, "rate", G_TYPE_INT, kDecodeRate, nullptr);
  gst_element_link_filtered(resample, sink, caps);
  gst_caps_unref(caps);

  GstAppSinkCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.new_buffer = NewBufferCallback;
  gst_app_sink_set_callbacks(reinterpret_cast<GstAppSink*>(sink), &callbacks,
                             this, nullptr);
  g_object_set(G_OBJECT(sink), "sync", FALSE, nullptr);
  g_object_set(G_OBJECT(sink), "emit-signals", TRUE, nullptr);

  // Set the filename
  g_object_set(src, "location", filename_.toUtf8().constData(), nullptr);

  // Connect signals
  CHECKED_GCONNECT(decode, "new-decoded-pad", &NewPadCallback, this);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this);
  guint bus_callback_id = gst_bus_add_watch(
      gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this);

  QTime time;
  time.start();

  // Start playing
  gst_element_set_state(pipeline_, GST_STATE_PLAYING);

  g_main_loop_run(event_loop_);
  g_main_loop_unref(event_loop_);
  g_main_context_unref(context);

  int decode_time = time.restart();

  buffer_.close();
  QByteArray data = buffer_.data();

  ChromaprintContext* chromaprint =
      chromaprint_new(CHROMAPRINT_ALGORITHM_DEFAULT);
  chromaprint_start(chromaprint, kDecodeRate, kDecodeChannels);
  chromaprint_feed(chromaprint, reinterpret_cast<void*>(data.data()),
                   data.size() / 2);
  chromaprint_finish(chromaprint);

  void* fprint = nullptr;
  int size = 0;
  int ret = chromaprint_get_raw_fingerprint(chromaprint, &fprint, &size);
  QByteArray fingerprint;
  if (ret == 1) {
    void* encoded = nullptr;
    int encoded_size = 0;
    chromaprint_encode_fingerprint(fprint, size, CHROMAPRINT_ALGORITHM_DEFAULT,
                                   &encoded, &encoded_size, 1);

    fingerprint.append(reinterpret_cast<char*>(encoded), encoded_size);

    chromaprint_dealloc(fprint);
    chromaprint_dealloc(encoded);
  }
  chromaprint_free(chromaprint);
  int codegen_time = time.elapsed();

  qLog(Debug) << "Decode time:" << decode_time
              << "Codegen time:" << codegen_time;

  // Cleanup
  callbacks.new_buffer = nullptr;
  gst_app_sink_set_callbacks(reinterpret_cast<GstAppSink*>(sink), &callbacks,
                             this, nullptr);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           nullptr, nullptr);
  g_source_remove(bus_callback_id);
  gst_element_set_state(pipeline_, GST_STATE_NULL);
  gst_object_unref(pipeline_);

  return fingerprint;
}
Example #19
//equivalent command line: 
//gst-launch-0.10 videotestsrc num_buffers=400 ! glupload ! gldownload ! 
//ffenc_mpeg4 ! avimux ! filesink location="record.avi"
// or
//gst-launch-0.10 videotestsrc num_buffers=400 ! glupload !  video/x-raw-gl, width=320, height=240 ! glfiltercube ! video/x-raw-gl, width=720, height=576 ! 
//gldownload ! ffenc_mpeg4 ! avimux ! filesink location="record.avi"
gint main (gint argc, gchar *argv[])
{
    GstStateChangeReturn ret;
    GstElement *pipeline, *videosrc, *glupload, *glfilterapp, *gldownload, *ffenc_mpeg4, *avimux, *filesink;
    GMainLoop *loop;
    GstBus *bus;

    /* initialization */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* create elements */
    pipeline = gst_pipeline_new ("pipeline");

    /* watch for messages on the pipeline's bus (note that this will only
     * work like this when a GLib main loop is running) */
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* create elements */
    videosrc = gst_element_factory_make ("videotestsrc", "videotestsrc0");
    glupload  = gst_element_factory_make ("glupload", "glupload0");
    glfilterapp = gst_element_factory_make ("glfilterapp", "glfilterapp0");
    gldownload  = gst_element_factory_make ("gldownload", "gldownload0");
    ffenc_mpeg4  = gst_element_factory_make ("ffenc_mpeg4", "ffenc_mpeg40");
    avimux  = gst_element_factory_make ("avimux", "avimux0");
    filesink  = gst_element_factory_make ("filesink", "filesink0");


    if (!videosrc || !glupload || !glfilterapp || !gldownload || !ffenc_mpeg4 || !avimux || !filesink) 
    {
        g_print ("one element could not be found \n");
        return -1;
    }

    /* change video source caps */
    GstCaps *caps = gst_caps_new_simple("video/x-raw-yuv",
                                        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
                                        "width", G_TYPE_INT, 320,
                                        "height", G_TYPE_INT, 240,
                                        "framerate", GST_TYPE_FRACTION, 25, 1,
                                        NULL) ;

    /* change video source caps */
    GstCaps *outcaps = gst_caps_new_simple("video/x-raw-yuv",
                                           "width", G_TYPE_INT, 640,
                                           "height", G_TYPE_INT, 480,
                                           NULL) ;

    /* configure elements */
    g_object_set(G_OBJECT(videosrc), "num-buffers", 400, NULL);
    g_object_set(G_OBJECT(glfilterapp), "client-reshape-callback", reshapeCallback, NULL);
    g_object_set(G_OBJECT(glfilterapp), "client-draw-callback", drawCallback, NULL);
    g_object_set(G_OBJECT(glfilterapp), "client-data", NULL, NULL);
    g_object_set(G_OBJECT(filesink), "location", "record.avi", NULL);
    
    /* add elements */
    gst_bin_add_many (GST_BIN (pipeline), videosrc, glupload, glfilterapp, gldownload, 
        ffenc_mpeg4, avimux, filesink, NULL);
    
    /* link elements */
    gboolean link_ok = gst_element_link_filtered(videosrc, glupload, caps) ;
    gst_caps_unref(caps) ;
    if(!link_ok)
    {
        g_warning("Failed to link videosrc to glupload!\n") ;
        return -1 ;
    }
    if (!gst_element_link_many(glupload, glfilterapp, gldownload, NULL)) 
    {
        g_print ("Failed to link one or more elements!\n");
        return -1;
    }
    link_ok = gst_element_link_filtered(gldownload, ffenc_mpeg4, outcaps) ;
    gst_caps_unref(outcaps) ;
    if(!link_ok)
    {
        g_warning("Failed to link glvideomaker to ffenc_mpeg4!\n") ;
        return -1 ;
    }
    if (!gst_element_link_many(ffenc_mpeg4, avimux, filesink, NULL)) 
    {
        g_print ("Failed to link one or more elements!\n");
        return -1;
    }

    
    /* run */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) 
    {
        g_print ("Failed to start up pipeline!\n");

        /* check if there is an error message with details on the bus */
        GstMessage* msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
        if (msg) 
        {
          GError *err = NULL;

          gst_message_parse_error (msg, &err, NULL);
          g_print ("ERROR: %s\n", err->message);
          g_error_free (err);
          gst_message_unref (msg);
        }
        return -1;
    }

    g_main_loop_run (loop);

    /* clean up */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;

}
Example #20
bool Player::prepare()
{
    //Init Gst
    //
    QString caps_value = "audio/x-raw";

      // On mac we bundle the gstreamer plugins with knowthelist
#if defined(Q_OS_DARWIN)
      QString scanner_path;
      QString plugin_path;
      QString registry_filename;

      QDir pd(QCoreApplication::applicationDirPath() + "/../plugins");
      scanner_path = QCoreApplication::applicationDirPath() + "/../plugins/gst-plugin-scanner";
      plugin_path = QCoreApplication::applicationDirPath() + "/../plugins/gstreamer";
      registry_filename = QDesktopServices::storageLocation(QDesktopServices::DataLocation) +
              QString("/gst-registry-%1-bin").arg(QCoreApplication::applicationVersion());

      if ( pd.exists())
        setenv("GST_PLUGIN_SCANNER", scanner_path.toLocal8Bit().constData(), 1);

      if ( pd.exists()) {
        setenv("GST_PLUGIN_PATH", plugin_path.toLocal8Bit().constData(), 1);
        // Never load plugins from anywhere else.
        setenv("GST_PLUGIN_SYSTEM_PATH", plugin_path.toLocal8Bit().constData(), 1);
      }

      if (!registry_filename.isEmpty()) {
        setenv("GST_REGISTRY", registry_filename.toLocal8Bit().constData(), 1);
      }
#elif defined(Q_OS_WIN32)
      QString plugin_path = QCoreApplication::applicationDirPath() + "/plugins";
      QDir pluginDir(plugin_path);
      if ( pluginDir.exists())
        _putenv_s("GST_PLUGIN_PATH", plugin_path.toLocal8Bit());

#endif

      //_putenv_s("GST_DEBUG", "*:4"); //win
      //setenv("GST_DEBUG", "*:3", 1); //unix


      gst_init (0, 0);

    //prepare

        GstElement *dec, *conv,*resample,*sink, *gain, *audio, *vol, *level, *equalizer;
        GstElement *levelout;
        GstPad *audiopad;
        GstCaps *caps;
        pipeline = gst_pipeline_new ("pipeline");
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));


#ifdef GST_API_VERSION_1
        dec = gst_element_factory_make ("decodebin", "decoder");
#else
        dec = gst_element_factory_make ("decodebin2", "decoder");
        caps_value = "audio/x-raw-int";
#endif
        caps = gst_caps_new_simple (caps_value.toLatin1().data(),
                                    "channels", G_TYPE_INT, 2, NULL);
        g_signal_connect (dec, "pad-added", G_CALLBACK (cb_newpad), this);
        gst_bin_add (GST_BIN (pipeline), dec);

        audio = gst_bin_new ("audiobin");
        conv = gst_element_factory_make ("audioconvert", "aconv");
        resample = gst_element_factory_make ("audioresample", "resample");
        audiopad = gst_element_get_static_pad (conv, "sink");
        gain = gst_element_factory_make ("audioamplify", "gain");
        level = gst_element_factory_make ("level", "levelintern");
        vol = gst_element_factory_make ("volume", "volume");
        levelout = gst_element_factory_make ("level", "levelout");
        equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer");
        sink = gst_element_factory_make ("autoaudiosink", "sink");

        g_object_set (level, "message", TRUE, NULL);
        g_object_set (levelout, "message", TRUE, NULL);
        g_object_set (level, "peak-ttl", 300000000000, NULL);


        gst_bin_add_many (GST_BIN (audio), conv, resample, level, gain, equalizer, levelout, vol, sink, NULL);
        gst_element_link (conv,resample);

        gst_element_link_filtered (resample, level, caps);
        gst_element_link (level, gain);
        gst_element_link (gain, equalizer);
        gst_element_link (equalizer, vol);
        gst_element_link_filtered (vol, levelout, caps);
        gst_element_link (levelout, sink);
        gst_caps_unref (caps); /* filtered links keep their own reference */

        gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad));
        gst_bin_add (GST_BIN (pipeline), audio);


        GstElement *l_src;
        l_src = gst_element_factory_make ("filesrc", "localsrc");
        gst_bin_add_many (GST_BIN (pipeline), l_src, NULL);
        gst_element_set_state (l_src, GST_STATE_NULL);
        gst_element_link (l_src, dec);

#ifdef GST_API_VERSION_1
        gst_bus_set_sync_handler (bus, bus_cb, this, NULL);
#else
        gst_bus_set_sync_handler (bus, bus_cb, this);
#endif
        gst_object_unref (audiopad);

        return pipeline != NULL;
}
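A minimal sketch of the cb_newpad handler connected above, assuming the generic "pad-added" signature (unlike decodebin2's "new-decoded-pad" in the TrackAnalyser example, there is no `last` argument) and that the audiobin is reachable from the Player's pipeline member:

static void
cb_newpad (GstElement *decodebin, GstPad *pad, gpointer data)
{
  Player *player = (Player *) data;
  GstElement *audio = gst_bin_get_by_name (GST_BIN (player->pipeline), "audiobin");
  GstPad *audiopad = gst_element_get_static_pad (audio, "sink");

  if (!GST_PAD_IS_LINKED (audiopad))
    gst_pad_link (pad, audiopad);  /* feed decoded audio into the audiobin */

  gst_object_unref (audiopad);
  gst_object_unref (audio);
}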
Example #21
KUIRecord::KUIRecord(QString format, QRect area, QString camera, QString screen )
{

  GstElement *camSource, *camQueue1, *camColor, *camRate, *camMux, *camQueue2, *camSink;

  camSource = gst_element_factory_make ("v4l2src", "camSource");
  camColor = gst_element_factory_make ("ffmpegcolorspace", "camColorspace");
  camRate = gst_element_factory_make("videorate", "camRate");
  camSink = gst_element_factory_make ("filesink", "camSink");
  camQueue1 = gst_element_factory_make("multiqueue", "camqueue1");
  camQueue2 = gst_element_factory_make("multiqueue", "camqueue2");
  
  if ( format==QString("avi") ) {
    camEnc = gst_element_factory_make ("xvidenc", "camEnc");
    camMux = gst_element_factory_make ("avimux", "camMux");
  } else {
    camEnc = gst_element_factory_make ("theoraenc", "camEnc");
    camMux = gst_element_factory_make ("oggmux", "camMux");
  }
  
  GstElement *screenSource, *screenRate, *screenColor, *screenMux, *screenSink, *screenQueue1, *screenQueue2;
  
  screenSource = gst_element_factory_make("ximagesrc", "screenSource");
  screenRate = gst_element_factory_make("videorate", "screenRate");
  screenColor = gst_element_factory_make("ffmpegcolorspace", "screenColor");
  screenSink = gst_element_factory_make("filesink", "screenSink");
  screenQueue1 = gst_element_factory_make("multiqueue", "screenqueue1");
  screenQueue2 = gst_element_factory_make("multiqueue", "screenqueue2");
  
  
  if ( format==QString("avi") ) {
    screenEnc = gst_element_factory_make("xvidenc", "screenEnc");
    screenMux = gst_element_factory_make("avimux", "screenMux");
  } else {
    screenEnc = gst_element_factory_make("theoraenc", "screenEnc");
    screenMux = gst_element_factory_make("oggmux", "screenMux");
  }

  pipeline = gst_pipeline_new ("recorder");
  
  camera.append( "." ).append( format );
  screen.append( "." ).append( format );
  

  
  qDebug() << camera;
  qDebug() << screen;
  
  char cameraChar[ camera.length()+1 ];
  memset( cameraChar, 0, camera.length()+1 );
  
  char screenChar[ screen.length()+1 ];
  memset( screenChar, 0, screen.length()+1 );
  
  for ( int i=0; i!=camera.length(); ++i ) {
    cameraChar[i] = camera.at(i).toAscii();
  }
  
  for ( int i=0; i!=screen.length(); ++i ) {
    screenChar[i] = screen.at(i).toAscii();
  }
  
  g_object_set (G_OBJECT (screenSource), "use-damage", false , NULL);
  g_object_set (G_OBJECT (screenSource), "startx", area.topLeft().x() , NULL);
  g_object_set (G_OBJECT (screenSource), "starty", area.topLeft().y() , NULL);
  g_object_set (G_OBJECT (screenSource), "endx", area.bottomRight().x() , NULL);
  g_object_set (G_OBJECT (screenSource), "endy", area.bottomRight().y() , NULL);
  
  g_object_set (G_OBJECT (camMux), "max-delay", 5000 , NULL);
  
  
  g_object_set (G_OBJECT (screenQueue1), "max-size-buffers", 10 , NULL);
  g_object_set (G_OBJECT (screenQueue2), "max-size-buffers", 10 , NULL);
  g_object_set (G_OBJECT (camQueue1), "max-size-buffers", 10 , NULL);
  g_object_set (G_OBJECT (camQueue2), "max-size-buffers", 10 , NULL);
  
  g_object_set (G_OBJECT (camSink), "location", cameraChar,  NULL);
  g_object_set (G_OBJECT (screenSink), "location", screenChar,  NULL);
  
  GstCaps *screenCaps;

  screenCaps = gst_caps_new_simple ("video/x-raw-rgb",
			      "framerate", GST_TYPE_FRACTION, 10, 1,
			      // "width", G_TYPE_INT , 200,
	  	              // "height", G_TYPE_INT, 200,
	  	              //"pixel-aspect-ratio", GST_TYPE_FRACTION, 1 , 1,
		             NULL);
  
  GstCaps *cameraCaps;

  cameraCaps = gst_caps_new_simple ("video/x-raw-rgb",
			      "framerate", GST_TYPE_FRACTION, 10, 1,
			      // "width", G_TYPE_INT , 200,
	  	              // "height", G_TYPE_INT, 200,
	  	              //"pixel-aspect-ratio", GST_TYPE_FRACTION, 1 , 1,
		             NULL);
  
    
  
  GstElement *audioSource, *audioRate, *audioConvert, *audioSample;
  
  audioSource = gst_element_factory_make("autoaudiosrc", "audioSource");
  audioRate = gst_element_factory_make("audiorate", "audioRate");
  audioConvert = gst_element_factory_make("audioconvert", "audioConvert");
  audioSample = gst_element_factory_make("audioresample", "audioSample" ); // note: created but never added to the pipeline below
  audioEnc = gst_element_factory_make("vorbisenc", "audioEnc");
  
  
  gst_bin_add_many (GST_BIN (pipeline),
			     screenSource,
			     screenRate,
			     screenColor,
			     screenEnc,
			     screenMux,
		             screenSink,
			     screenQueue1,
			     screenQueue2,
		    
		             camSource,
			     camColor,
			     camQueue1,
			     camRate,
			     camEnc,
			     camMux,
			     camQueue2,
			     camSink,
		
			     audioSource,
			     
			     audioConvert,
			     audioRate,
			     audioEnc,
		  
			     NULL);
  
  
  
  gst_element_link_filtered (screenSource, screenColor, screenCaps);
  gst_element_link_many (screenColor, screenRate, screenQueue1, screenEnc,screenQueue2 , screenMux, screenSink, NULL);

  
  gst_element_link_filtered (camSource, camColor, cameraCaps);
  gst_element_link_many ( camColor, camRate, camQueue2, camEnc, camQueue1, camMux, camSink,  NULL);
  
  gst_element_link_many ( audioSource, audioConvert, audioRate, audioEnc, camQueue1, camMux,  NULL);
  
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

}
Example #22
int
main (gint argc, gchar ** argv)
{
  static GMainLoop *loop;
  GstCaps *caps;
  GstBus *bus;
  gchar *uri;

  GOptionEntry options[] = {
    {"control-volume", 'c', 0, G_OPTION_ARG_NONE, &ctrlvol,
        "Control the volume by hiding the nose or mouth", NULL},
    {"silent", 's', 0, G_OPTION_ARG_NONE, &silent,
        "Don't output the messages and detected faces structure", NULL},
    {NULL}
  };
  GOptionContext *ctx;
  GError *err = NULL;

  ctx = g_option_context_new ("<video file>\n\nfacedetect test application.");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing: %s\n", err->message);
    exit (1);
  }
  g_option_context_free (ctx);

  if (argc < 2) {
    fprintf (stderr, "oops, please give a file to play\n");
    return -1;
  }

  uri = g_filename_to_uri (argv[1], NULL, NULL);
  if (!uri) {
    fprintf (stderr, "failed to create the uri\n");
    return -1;
  }

  /* init gst */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);
  /* init elements */
  playbin = gst_element_factory_make ("playbin", "app_playbin");
  pipeline = gst_pipeline_new ("app_pipeline");
  v4l2src = gst_element_factory_make ("v4l2src", "app_v4l2src");
  videoscale = gst_element_factory_make ("videoscale", "app_videoscale");
  videoconvert_in =
      gst_element_factory_make ("videoconvert", "app_videoconvert_in");
  facedetect = gst_element_factory_make ("facedetect", "app_facedetect");
  videoconvert_out =
      gst_element_factory_make ("videoconvert", "app_videoconvert_out");
  autovideosink =
      gst_element_factory_make ("autovideosink", "app_autovideosink");

  /* check init results */
  if (!playbin || !pipeline || !v4l2src || !videoscale || !videoconvert_in
      || !facedetect || !videoconvert_out || !autovideosink)
    g_error ("ERROR: element init failed.\n");

  /* set values */
  g_object_set (G_OBJECT (playbin), "uri", uri, NULL);

  /* set caps */
  caps =
      gst_caps_from_string
      ("video/x-raw, format=(string)RGB, width=320, height=240, framerate=(fraction)30/1");

  /* set bus */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, pipeline,
      NULL);
  gst_object_unref (bus);

  /* add elements to pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      v4l2src,
      videoscale,
      videoconvert_in, facedetect, videoconvert_out, autovideosink, NULL);

  /* negotiate caps */
  if (!gst_element_link_filtered (v4l2src, videoscale, caps)) {
    g_printerr ("ERROR:v4l2src -> videoscale caps\n");
    return 0;
  }
  gst_caps_unref (caps);

  /* link elements */
  gst_element_link_many (videoscale,
      videoconvert_in, facedetect, videoconvert_out, autovideosink, NULL);

  /* change states */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* start main loop */
  g_main_loop_run (loop);

  /* clean all */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (playbin));

  return 0;
}
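A minimal sketch of the bus_sync_handler installed above, assuming the facedetect element posts element messages named "facedetect" (honoring the --silent flag parsed earlier):

static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);

    if (gst_structure_has_name (s, "facedetect") && !silent) {
      gchar *str = gst_structure_to_string (s);
      g_print ("%s\n", str);
      g_free (str);
    }
  }
  return GST_BUS_PASS;  /* let every message continue to async handlers */
}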
Example #23
static gboolean initialize_pipeline()
{
	GstElement *pipeline, *camera_src, *image_sink;
	GstElement *image_queue;
	GstElement *csp_filter, *image_filter, *tee;
	GstCaps *caps;
	GstBus *bus;

	gst_init(NULL, NULL);

	pipeline = gst_pipeline_new("test-camera");

	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus, (GstBusFunc)bus_callback, cappdata); /* pass the pointer itself; it is dereferenced below */
	gst_object_unref(GST_OBJECT(bus));

	cappdata->pipeline = pipeline;

	camera_src = gst_element_factory_make(VIDEO_SRC, "camera_src");
	csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");
	tee = gst_element_factory_make("tee", "tee");

	image_queue = gst_element_factory_make("queue", "image_queue");
	image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");
	image_sink = gst_element_factory_make("fakesink", "image_sink");

	if(!(pipeline && camera_src && csp_filter && tee
		&& image_queue && image_filter && image_sink))
	{
		g_critical("Couldn't create pipeline elements");
		return FALSE;
	}


	g_object_set(G_OBJECT(image_sink),
			"signal-handoffs", TRUE, NULL);

	gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
			tee, image_queue,
			image_filter, image_sink, NULL);

	caps = gst_caps_new_simple("video/x-raw-rgb",
			"width", G_TYPE_INT, 640,
			"height", G_TYPE_INT, 480,
			NULL);

	if(!gst_element_link_filtered(camera_src, csp_filter, caps))
	{
		return FALSE;
	}
	gst_caps_unref(caps);

	if(!gst_element_link_many(csp_filter, tee, NULL))
	{
		return FALSE;
	}


	caps = gst_caps_new_simple("video/x-raw-rgb",
			"width", G_TYPE_INT, 640,
			"height", G_TYPE_INT, 480,
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"framerate", GST_TYPE_FRACTION, 15, 1,
			NULL);

	if(!gst_element_link_many(tee, image_queue, image_filter, NULL)) return FALSE;
	if(!gst_element_link_filtered(image_filter, image_sink, caps)) return FALSE;

	gst_caps_unref(caps);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	return TRUE;
}
Example #24
void
_bp_vis_pipeline_setup (BansheePlayer *player)
{
    // The basic pipeline we're constructing is:
    // .audiotee ! queue ! audioresample ! audioconvert ! fakesink

    GstElement *fakesink, *converter, *resampler, *audiosinkqueue;
    GstCaps *caps;
    GstPad *teepad;
    GstPad *pad;

    player->vis_buffer = NULL;
    player->vis_fft = gst_fft_f32_new (SLICE_SIZE * 2, FALSE);
    player->vis_fft_buffer = g_new (GstFFTF32Complex, SLICE_SIZE + 1);
    player->vis_fft_sample_buffer = g_new0 (gfloat, SLICE_SIZE);
    
    // Core elements, if something fails here, it's the end of the world
    audiosinkqueue = gst_element_factory_make ("queue", "vis-queue");

    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    gst_pad_add_event_probe (pad, G_CALLBACK (_bp_vis_pipeline_event_probe), player);
    gst_object_unref (GST_OBJECT (pad));

    resampler = gst_element_factory_make ("audioresample", "vis-resample");
    converter = gst_element_factory_make ("audioconvert", "vis-convert");
    fakesink = gst_element_factory_make ("fakesink", "vis-sink");

    if (audiosinkqueue == NULL || resampler == NULL || converter == NULL || fakesink == NULL) {
        bp_debug ("Could not construct visualization pipeline, a fundamental element could not be created");
        return;
    }

    // Keep around the 5 most recent seconds of audio so that when resuming
    // visualization we have something to show right away.
    g_object_set (G_OBJECT (audiosinkqueue),
            "leaky", 2,
            "max-size-buffers", 0,
            "max-size-bytes", 0,
            "max-size-time", GST_SECOND * 5,
            NULL);
    
    g_signal_connect (G_OBJECT (fakesink), "handoff", G_CALLBACK (bp_vis_pcm_handoff), player);

    g_object_set (G_OBJECT (fakesink),
            // This enables the handoff signal.
            "signal-handoffs", TRUE,
            // Synchronize so we see vis at the same time as we hear it.
            "sync", TRUE,
            // Drop buffers if they come in too late.  This is mainly used when
            // thawing the vis pipeline.
            "max-lateness", GST_SECOND / 120,
            // Deliver buffers one frame early.  This allows for rendering
            // time.  (TODO: It would be great to calculate this on-the-fly so
            // we match the rendering time.)
            "ts-offset", -GST_SECOND / 60,
            // Don't go to PAUSED when we freeze the pipeline.
            "async", FALSE, NULL);
    
    gst_bin_add_many (GST_BIN (player->audiobin), audiosinkqueue, resampler,
                      converter, fakesink, NULL);
    
    pad = gst_element_get_static_pad (audiosinkqueue, "sink");
    teepad = gst_element_get_request_pad (player->audiotee, "src%d");
    gst_pad_link (teepad, pad);
    gst_object_unref (GST_OBJECT (teepad));
    gst_object_unref (GST_OBJECT (pad));
    
    gst_element_link_many (audiosinkqueue, resampler, converter, NULL);
    
    caps = gst_static_caps_get (&vis_data_sink_caps);
    gst_element_link_filtered (converter, fakesink, caps);
    gst_caps_unref (caps);
    
    player->vis_buffer = gst_adapter_new ();
    player->vis_resampler = resampler;
    player->vis_thawing = FALSE;
    player->vis_enabled = FALSE;

    // Disable the pipeline till we hear otherwise from managed land.
    _bp_vis_pipeline_set_blocked (player, TRUE);
}
Example #25
bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&
      !device_.toString().isEmpty()) {
    switch (device_.type()) {
      case QVariant::Int:
        g_object_set(G_OBJECT(audiosink_),
                     "device", device_.toInt(),
                     nullptr);
        break;
      case QVariant::String:
        g_object_set(G_OBJECT(audiosink_),
                     "device", device_.toString().toUtf8().constData(),
                     nullptr);
        break;

      #ifdef Q_OS_WIN32
      case QVariant::ByteArray: {
        GUID guid = QUuid(device_.toByteArray());
        g_object_set(G_OBJECT(audiosink_),
                     "device", &guid,
                     nullptr);
        break;
      }
      #endif  // Q_OS_WIN32

      default:
        qLog(Warning) << "Unknown device type" << device_;
        break;
    }
  }

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add a data probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualizations are not affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Set the equalizer bands
  g_object_set(G_OBJECT(equalizer_), "num-bands", 10, nullptr);

  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    GstObject* band =
        gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i);

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }
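
  // ("low-percent" and "use-buffering" are queue2 properties rather than
  // plain queue ones, so queue_ is presumably a queue2 when time-based
  // network buffering is wanted.)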

  gst_element_link(queue_, audioconvert_);

  // Create the caps to put in each path in the tee.  The scope path gets 16-bit
  // ints and the audiosink path gets float32.
  GstCaps* caps16 =
      gst_caps_new_simple("audio/x-raw-int", "width", G_TYPE_INT, 16, "signed",
                          G_TYPE_BOOLEAN, true, nullptr);
  GstCaps* caps32 = gst_caps_new_simple("audio/x-raw-float", "width",
                                        G_TYPE_INT, 32, nullptr);
  if (mono_playback_) {
    gst_caps_set_simple(caps32, "channels", G_TYPE_INT, 1, nullptr);
  }
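
  // (audio/x-raw-int and audio/x-raw-float are GStreamer 0.10 caps names;
  // 1.0 replaced both with audio/x-raw plus a "format" field such as
  // S16LE or F32LE.)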

  // Link the elements with special caps
  gst_element_link_filtered(probe_converter, probe_sink, caps16);
  gst_element_link_filtered(audioconvert_, convert_sink, caps32);
  gst_caps_unref(caps16);
  gst_caps_unref(caps32);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(audio_queue, "sink"));
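
  // Note: "src%d" is tee's request-pad template name in GStreamer 0.10 (1.0
  // renamed it "src_%u"), and gst_element_get_request_pad() returns a new
  // reference that should eventually be released with
  // gst_element_release_request_pad() plus gst_object_unref().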

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link everything else.
  gst_element_link(probe_queue, probe_converter);
  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        audiosink_, nullptr);

  // Add probes and handlers.
  gst_pad_add_buffer_probe(gst_element_get_static_pad(probe_converter, "src"),
                           G_CALLBACK(HandoffCallback), this);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
Exemplo n.º 26
0
gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline;
  pthread_t threadid;
  void *exit_status;

  if (argc < 2) {
     g_print ("[E] please provide a port number\n");
     return -1;
  }
  int portno = atoi(argv[1]);

  /* init */
  gst_init (&argc, &argv);

  /* create pipeline, add handler */
  pipeline = gst_pipeline_new ("my_pipeline");

  // create elements
  source = gst_element_factory_make("alsasrc", "source");
  filter = gst_element_factory_make("audioconvert", "filter");
  // filter2 = gst_element_factory_make("cutter", "filter2");
  filter2 = gst_element_factory_make("wavenc", "filter2");
  sink = gst_element_factory_make("multiudpsink", "sink");
  if (!source || !filter || !filter2 || !sink) {
    if (!source) g_print ("[E] failed to create alsasrc element\n");
    if (!filter) g_print ("[E] failed to create audioconvert element\n");
    //if (!filter2) g_print ("[E] failed to create cutter element\n");
    if (!filter2) g_print ("[E] failed to create wavenc element\n");
    if (!sink) g_print ("[E] failed to create multiudpsink element\n");
    return -1;
  }
  
  g_object_set(G_OBJECT(source), "device", "plughw:1,0", NULL);
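  // ("plughw:1,0" selects ALSA card 1, device 0 through the
  //  format-converting "plug" layer; adjust for the target machine.)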
  
  // add to pipeline before linking
  gst_bin_add_many(GST_BIN(pipeline), source, filter, filter2, sink, NULL);

  GstCaps *caps = gst_caps_new_simple("audio/x-raw-int",
                                      "rate", G_TYPE_INT, 16000,
                                      "channels", G_TYPE_INT, 1,
                                      "depth", G_TYPE_INT, 16,
                                      NULL);
	
  // link
  if (!gst_element_link_filtered(source, filter, caps) ||
      !gst_element_link(filter, filter2) ||
      !gst_element_link(filter2, sink)) {
    g_print("[E] failed to link elements\n");
    gst_caps_unref (caps);
    gst_object_unref (pipeline);
    return -1;
  }
  gst_caps_unref (caps);
  // start playing
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  // do this inside a thread:
  g_print("[I] starting udp server thread\n");
  pthread_create(&threadid, NULL, run_upd_listen, &portno);
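  // The listener thread presumably registers stream clients on the
  // multiudpsink; a minimal sketch using multiudpsink's real "add"/"remove"
  // action signals (the host/port values here are only illustrative):
  //   g_signal_emit_by_name(G_OBJECT(sink), "add", "192.168.1.10", 5004);
  //   g_signal_emit_by_name(G_OBJECT(sink), "remove", "192.168.1.10", 5004);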

	g_print("[I] starting gstreamer main loop\n");
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  /* clean up */
  pthread_join(threadid, &exit_status);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);

  return 0;
}
Exemplo n.º 27
0
/* Creates simple bin that plays a file with decodebin.
*  Should be used as the last input of the video mixer. 
*  Scaling should be set when linked to the mixer element! 
*  bin elements : filesrc ! decodebin ! videoscale ! queue
*                 decodebin.audio ! aqueue
*  src ghost pads are added as outputs to the bin.*/
GstElement* introbin_new(CustomData *data)
{
   GstElement *introbin,*introsrc,*introdec,*introscale,*introvqueue,
           *introatee,*introaqueue1,*introaqueue2,*introaudiosink,*introaudiorate,
           *introvvalve,*introavalve;
   GstCaps *scale_caps,*audio_caps;
   GstPad *vpad,*apad;
   //Create bin, elements, caps and link everything.
   introbin=gst_bin_new("introbin");
   introsrc=gst_element_factory_make("filesrc","introsrc");
   introdec=gst_element_factory_make("decodebin","introdec");
   introscale=gst_element_factory_make("videoscale","introscale");
   introvqueue=gst_element_factory_make("queue","introvqueue");
   introvvalve=gst_element_factory_make("valve","introvvalve");
   introaqueue1=gst_element_factory_make("queue","introaqueue1");
   introaqueue2=gst_element_factory_make("queue","introaqueue2");
   introavalve=gst_element_factory_make("valve","introavalve");
   introatee=gst_element_factory_make("tee","introatee");
   introaudiosink=gst_element_factory_make("alsasink","introaudiosink");
   introaudiorate=gst_element_factory_make("audioresample","introaudiorate");
   g_object_set(G_OBJECT(introaudiosink),"device",DEV_SPEAKER,"sync",TRUE,"qos",FALSE,NULL);
   gst_bin_add_many(GST_BIN(introbin),introsrc,introdec,introscale,introvqueue,
       introatee,introaqueue1,introaqueue2,introaudiosink,introaudiorate,
       introvvalve,introavalve,NULL);
   scale_caps=gst_caps_new_simple("video/x-raw",
//                                "format",G_TYPE_STRING,"YUV",
//                                "alpha",G_TYPE_INT,0,
//                                "framerate",GST_TYPE_FRACTION,FRAMES_PER_SEC,1,
                                "width",G_TYPE_INT,CAMERA_RES_WIDTH,
                                "height",G_TYPE_INT,CAMERA_RES_HEIGHT,
                                NULL);
   audio_caps=gst_caps_new_simple("audio/x-raw",
                                  "format",G_TYPE_STRING,"S16LE",
                                  "rate",G_TYPE_INT,44100,
                                  "layout",G_TYPE_STRING,"interleaved",
                                  "channels",G_TYPE_INT,2,
                                  NULL);
   gst_element_link_many(introsrc,introdec,NULL);
   /* decodebin's src pad is a sometimes pad - it gets created dynamically */
   g_signal_connect(introdec,"pad-added",G_CALLBACK(on_new_decoded_pad),data);
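   /* A sketch of what on_new_decoded_pad() is assumed to do here: check the
    * new pad's caps and link video pads to introscale and audio pads to
    * introaudiorate, roughly:
    *   GstCaps *c = gst_pad_get_current_caps(pad);
    *   const gchar *n = gst_structure_get_name(gst_caps_get_structure(c,0));
    *   GstPad *sinkpad = gst_element_get_static_pad(
    *       g_str_has_prefix(n,"video/") ? introscale : introaudiorate, "sink");
    *   gst_pad_link(pad, sinkpad);
    */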
   gst_element_link_filtered(introscale,introvqueue,scale_caps);
   gst_element_link(introvqueue,introvvalve);
   gst_element_link_many(introatee,introaqueue1,introaudiosink,NULL);
   gst_element_link_filtered(introaudiorate,introatee,audio_caps);
   gst_element_link_many(introatee,introaqueue2,introavalve,NULL);
   gst_caps_unref(scale_caps);
   gst_caps_unref(audio_caps);
//   gst_element_link(introaqueue,introafakesink);
   //Create the ghost src pads for the bin.
   vpad=gst_element_get_static_pad(introvvalve,"src");
   gst_element_add_pad(introbin,gst_ghost_pad_new("vsrc",vpad));
   GstPad *vgpad=gst_element_get_static_pad(introbin,"vsrc");
   gst_pad_add_probe (vgpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
         (GstPadProbeCallback) introbin_pad_block_cb,data, NULL);
   gst_object_unref(vpad);
   gst_object_unref(vgpad);
   
   apad=gst_element_get_static_pad(introavalve,"src");
   gst_element_add_pad(introbin,gst_ghost_pad_new("asrc",apad));
   GstPad *agpad=gst_element_get_static_pad(introbin,"asrc");
   gst_pad_add_probe (agpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
         (GstPadProbeCallback) introbin_pad_block_cb,data, NULL);
   gst_object_unref(apad);
   gst_object_unref(agpad);
   /* set initial parameters */
   g_object_set(G_OBJECT(introsrc),"location",data->config[data->selected_config].intro,NULL);
//   g_object_set(G_OBJECT(introafakesink),"sync",TRUE,NULL);
//   g_object_set(G_OBJECT(bkgqueue),"leaky",2,NULL);
   
   /* set eos handler function */
//   dec_pad_sink=gst_element_get_static_pad(bkgdec,"sink");
//   gst_pad_set_event_function(dec_pad_sink,eos_callback);

   return introbin;
}
Exemplo n.º 28
0
gint main (gint argc, gchar *argv[])
{
    if (argc != 2)
    {
        g_warning ("usage: cubeyuv.exe videolocation\n");
        return -1;
    }

    std::string video_location(argv[1]);

    /* initialization */
    gst_init (&argc, &argv);
    GMainLoop* loop = g_main_loop_new (NULL, FALSE);

    /* create elements */
    GstElement* pipeline = gst_pipeline_new ("pipeline");

    /* watch for messages on the pipeline's bus (note that this will only
     * work like this when a GLib main loop is running) */
    GstBus* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* create elements */
    GstElement* videosrc = gst_element_factory_make ("filesrc", "filesrc0");
    GstElement* decodebin = gst_element_factory_make ("decodebin", "decodebin");
    GstElement* identity  = gst_element_factory_make ("identity", "identity0");
    GstElement* textoverlay = gst_element_factory_make ("textoverlay", "textoverlay0");
    GstElement* glcolorscale = gst_element_factory_make ("glcolorscale", "glcolorscale0");
    GstElement* glimagesink  = gst_element_factory_make ("glimagesink", "glimagesink0");


    if (!videosrc || !decodebin || !identity || !textoverlay ||
        !glcolorscale || !glimagesink)
    {
        g_print ("one element could not be found \n");
        return -1;
    }

    GstCaps *outcaps = gst_caps_new_simple("video/x-raw",
                                           "width", G_TYPE_INT, 640,
                                           "height", G_TYPE_INT, 480,
                                           NULL);

    /* configure elements */
    g_object_set(G_OBJECT(videosrc), "num-buffers", 800, NULL);
    g_object_set(G_OBJECT(videosrc), "location", video_location.c_str(), NULL);
    g_signal_connect(identity, "handoff", G_CALLBACK(identityCallback), textoverlay);
    g_object_set(G_OBJECT(textoverlay), "font_desc", "Ahafoni CLM Bold 30", NULL);
    g_signal_connect(G_OBJECT(glimagesink), "client-reshape", G_CALLBACK (reshapeCallback), NULL);
    g_signal_connect(G_OBJECT(glimagesink), "client-draw", G_CALLBACK (drawCallback), NULL);

    /* add elements */
    gst_bin_add_many (GST_BIN (pipeline), videosrc, decodebin, identity,
        textoverlay, glcolorscale, glimagesink, NULL);

    /* link elements */
    gst_element_link_pads (videosrc, "src", decodebin, "sink");

    g_signal_connect (decodebin, "pad-added", G_CALLBACK (cb_new_pad), identity);

    if (!gst_element_link_pads(identity, "src", textoverlay, "video_sink"))
    {
        g_print ("Failed to link identity to textoverlay!\n");
        return -1;
    }

    gst_element_link (textoverlay, glcolorscale);

    gboolean link_ok = gst_element_link_filtered(glcolorscale, glimagesink, outcaps);
    gst_caps_unref(outcaps);
    if(!link_ok)
    {
        g_warning("Failed to link glcolorscale to glimagesink!\n");
        return -1;
    }

    /* run */
    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_print ("Failed to start up pipeline!\n");

        /* check if there is an error message with details on the bus */
        GstMessage* msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
        if (msg)
        {
          GError *err = NULL;

          gst_message_parse_error (msg, &err, NULL);
          g_print ("ERROR: %s\n", err->message);
          g_error_free (err);
          gst_message_unref (msg);
        }
        return -1;
    }

    g_main_loop_run (loop);

    /* clean up */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;
}
Exemplo n.º 29
0
static gint create_encoder_pipeline (Encoder *encoder)
{
        GstElement *pipeline, *element;
        Bin *bin;
        Link *link;
        GSList *bins, *links, *elements;
        GstElementFactory *element_factory;
        GType type;
        EncoderStream *stream;
        GstAppSrcCallbacks callbacks = {
                need_data_callback,
                NULL,
                NULL
        };
        GstAppSinkCallbacks encoder_appsink_callbacks = {
                NULL,
                NULL,
                new_sample_callback
        };
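        /* These are positional initializers, so field order matters:
         * GstAppSrcCallbacks is {need_data, enough_data, seek_data} and
         * GstAppSinkCallbacks is {eos, new_preroll, new_sample}; only
         * need_data_callback and new_sample_callback are installed. */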
        GstCaps *caps;
        GstBus *bus;
 
        pipeline = gst_pipeline_new (NULL);

        /* add the elements to the pipeline first. */
        bins = encoder->bins;
        while (bins != NULL) {
                bin = bins->data;
                elements = bin->elements;
                while (elements != NULL) {
                        element = elements->data;
                        if (!gst_bin_add (GST_BIN (pipeline), element)) {
                                GST_ERROR ("add element %s to bin %s error.", gst_element_get_name (element), bin->name);
                                return 1;
                        }
                        elements = g_slist_next (elements);
                }
                bins = g_slist_next (bins);
        }

        /* then link the elements. */
        bins = encoder->bins;
        while (bins != NULL) {
                bin = bins->data;
                element = bin->first;
                element_factory = gst_element_get_factory (element);
                type = gst_element_factory_get_element_type (element_factory);
                stream = NULL;
                if (g_strcmp0 ("GstAppSrc", g_type_name (type)) == 0) {
                        GST_INFO ("Encoder appsrc found.");
                        stream = encoder_get_stream (encoder, bin->name);
                        gst_app_src_set_callbacks (GST_APP_SRC (element), &callbacks, stream, NULL);
                }
                element = bin->last;
                element_factory = gst_element_get_factory (element);
                type = gst_element_factory_get_element_type (element_factory);
                if ((g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) ||
                    (g_strcmp0 ("GstHlsSink", g_type_name (type)) == 0) ||
                    (g_strcmp0 ("GstFileSink", g_type_name (type)) == 0)) {
                        GstPad *pad;

                        if (g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) {
                                GST_INFO ("Encoder appsink found.");
                                gst_app_sink_set_callbacks (GST_APP_SINK (element), &encoder_appsink_callbacks, encoder, NULL);
                        }
                        pad = gst_element_get_static_pad (element, "sink");
                        gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoder_appsink_event_probe, encoder, NULL);
                        /* drop the reference returned by get_static_pad */
                        gst_object_unref (pad);
                }
                links = bin->links;
                while (links != NULL) {
                        link = links->data;
                        GST_INFO ("link element: %s -> %s", link->src_name, link->sink_name);
                        if (link->caps != NULL) {
                                caps = gst_caps_from_string (link->caps);
                                gst_element_link_filtered (link->src, link->sink, caps);
                                gst_caps_unref (caps);
                        } else {
                                gst_element_link (link->src, link->sink);
                        }
                        links = g_slist_next (links);
                }
                bins = g_slist_next (bins);
        }
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
        gst_bus_add_watch (bus, bus_callback, encoder);
        g_object_unref (bus);
        encoder->pipeline = pipeline;

        return 0;
}
Exemplo n.º 30
0
int CGsCapture::Init()
{_STT();
	GstElement *pipeline, *camera_src, *screen_sink, *image_sink;
	GstElement *screen_queue, *image_queue;
	GstElement *csp_filter, *image_filter, *tee;
	GstCaps *caps;
	GstBus *bus;
	GMainLoop *loop;

	/* Initialize Gstreamer */
//	gst_init( 0, 0 );

oexM();
	GError *err = 0;
	if ( !gst_init_check( 0, 0, &err ) )
	{	oexSHOW( err->message );
		g_error_free( err );
		oexEcho( "gst_init_check() failed" );
		return -1;
	}
oexM();

	loop = g_main_loop_new (NULL, FALSE);
	oexSHOW( (long)loop );

	/* Create pipeline and attach a callback to its
	 * message bus */
	pipeline = gst_pipeline_new("test-camera");

	bus = gst_pipeline_get_bus( GST_PIPELINE( pipeline ) );
	gst_bus_add_watch( bus, (GstBusFunc)bus_callback, loop );
	gst_object_unref( GST_OBJECT( bus ) );

/*
	GstElement *filesrc  = gst_element_factory_make ("filesrc", "my_filesource");
	if ( !filesrc )
	{
		oexEcho( "gst_element_factory_make() failed" );
		return -1;
	} // end if
*/

	/* Create elements */
	/* Camera video stream comes from a Video4Linux driver */
	camera_src = gst_element_factory_make("v4l2src", "camera_src");

//	gst_play_error_plugin (VIDEO_SRC, &err);

oexM();

	/* Colorspace filter is needed to make sure that sinks understands
	 * the stream coming from the camera */
	csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");

oexM();

	/* Tee that copies the stream to multiple outputs */
	tee = gst_element_factory_make("tee", "tee");


	/* Queue creates new thread for the stream */
	screen_queue = gst_element_factory_make("queue", "screen_queue");


	/* Sink that shows the image on screen. Xephyr doesn't support XVideo
	 * extension, so it needs to use ximagesink, but the device uses
	 * xvimagesink */
	screen_sink = gst_element_factory_make(VIDEO_SINK, "screen_sink");
	/* Creates separate thread for the stream from which the image
	 * is captured */
	image_queue = gst_element_factory_make("queue", "image_queue");
	/* Filter to convert stream to use format that the gdkpixbuf library
	 * can use */
	image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");

	/* A dummy sink for the image stream. Goes to bitheaven */
	image_sink = gst_element_factory_make("fakesink", "image_sink");

oexM();

	/* Check that elements are correctly initialized */
	if(!(pipeline && bus && camera_src && screen_sink && csp_filter && tee
		&& screen_queue && image_queue && image_filter && image_sink))
	{
		oexSHOW( (long)pipeline );
		oexSHOW( (long)bus );
		oexSHOW( (long)camera_src );
		oexSHOW( (long)screen_sink );
		oexSHOW( (long)csp_filter );
		oexSHOW( (long)tee );
		oexSHOW( (long)screen_queue );
		oexSHOW( (long)image_queue );
		oexSHOW( (long)image_filter );
		oexSHOW( (long)image_sink );

		oexEcho("Couldn't create pipeline elements");
		return -1;
	}

	/* Set image sink to emit the handoff signal before throwing away
	 * its buffer */
	g_object_set(G_OBJECT(image_sink),
			"signal-handoffs", TRUE, NULL);

	/* Add elements to the pipeline. This has to be done prior to
	 * linking them */
	gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
			tee, screen_queue, screen_sink, image_queue,
			image_filter, image_sink, NULL);

	/* Specify what kind of video is wanted from the camera */
	caps = gst_caps_new_simple("video/x-raw-rgb",
			"width", G_TYPE_INT, 640,
			"height", G_TYPE_INT, 480,
			NULL);


oexM();

	/* Link the camera source and colorspace filter using capabilities
	 * specified */
	if(!gst_element_link_filtered(camera_src, csp_filter, caps))
	{
		oexEcho( "gst_element_link_filtered() failed" );
		return -1;
	}
	gst_caps_unref(caps);

	/* Connect Colorspace Filter -> Tee -> Screen Queue -> Screen Sink
	 * This finalizes the initialization of the screen-part of the pipeline */
	if(!gst_element_link_many(csp_filter, tee, screen_queue, screen_sink, NULL))
	{
		oexEcho( "gst_element_link_many() failed" );
		return -1;
	}

	/* gdkpixbuf requires 8 bits per sample which is 24 bits per
	 * pixel */
	caps = gst_caps_new_simple("video/x-raw-rgb",
			"width", G_TYPE_INT, 640,
			"height", G_TYPE_INT, 480,
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"framerate", GST_TYPE_FRACTION, 15, 1,
			NULL);

oexM();

	/* Link the image-branch of the pipeline. The pipeline is
	 * ready after this */
	if(!gst_element_link_many(tee, image_queue, image_filter, NULL))
	{	oexEcho( "gst_element_link_many() failed" );
		return -1;
	}

	if(!gst_element_link_filtered(image_filter, image_sink, caps))
	{	oexEcho( "gst_element_link_filtered() failed" );
		return -1;
	}

	gst_caps_unref(caps);

	/* As soon as screen is exposed, window ID will be advised to the sink */
//	g_signal_connect(appdata->screen, "expose-event", G_CALLBACK(expose_cb),
//			 screen_sink);

oexM();

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

oexM();

	{ // Take snap shot

		GstElement *image_sink;

		/* Get the image sink element from the pipeline */
		image_sink = gst_bin_get_by_name(GST_BIN(pipeline),
				"image_sink");

		if ( !image_sink )
		{	oexEcho( "image_sink is null" );
			return -1;
		}


		/* Display a note to the user */
//		hildon_banner_show_information(GTK_WIDGET(appdata->window),
	//		NULL, "Taking Photo");

		/* Connect the "handoff"-signal of the image sink to the
		 * callback. This gets called whenever the sink gets a
		 * buffer it's ready to pass forward on the pipeline */
//		appdata->buffer_cb_id = g_signal_connect(
//				G_OBJECT(image_sink), "handoff",
//				G_CALLBACK(buffer_probe_callback), appdata);

		/* drop the reference returned by gst_bin_get_by_name() */
		gst_object_unref( image_sink );
	}

	return 0;
}