Example #1
static void
configure_test_element (GstBin * bin, const gchar * capsfilter)
{
  GstElement *filter;
  GstElement *identity;
  GstPad *pad, *ghostpad;
  GstPadTemplate *test_static_templ;

  filter = gst_element_factory_make ("capsfilter", NULL);
  fail_unless (filter != NULL);
  gst_util_set_object_arg (G_OBJECT (filter), "caps", capsfilter);

  identity = gst_element_factory_make ("identity", NULL);
  fail_unless (identity != NULL);

  gst_bin_add_many (bin, filter, identity, NULL);
  fail_unless (gst_element_link_many (filter, identity, NULL) == TRUE);

  test_static_templ = gst_static_pad_template_get (&sink_factory);
  pad = gst_element_get_static_pad (filter, "sink");
  ghostpad = gst_ghost_pad_new_from_template ("sink", pad, test_static_templ);
  gst_element_add_pad (GST_ELEMENT_CAST (bin), ghostpad);
  gst_object_unref (pad);
  gst_object_unref (test_static_templ);

  test_static_templ = gst_static_pad_template_get (&src_factory);
  pad = gst_element_get_static_pad (identity, "src");
  ghostpad = gst_ghost_pad_new_from_template ("src", pad, test_static_templ);
  gst_element_add_pad (GST_ELEMENT_CAST (bin), ghostpad);
  gst_object_unref (pad);
  gst_object_unref (test_static_templ);
}
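A minimal usage sketch (hypothetical; it assumes the sink_factory and src_factory static pad templates defined elsewhere in the test file):

/* wrap the capsfilter/identity chain in a bin with proxying ghost pads */
GstElement *testbin = gst_bin_new ("test-bin");
configure_test_element (GST_BIN (testbin), "video/x-raw, format=I420");
/* testbin now exposes "sink" and "src" pads and links like any element */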
static void
set_encoder_settings (GstElement *encoder, gchar *settings)
{
    int i;
    gchar **params = g_strsplit_set(settings, "= ", -1);
    /* for each property=value pair, we set it on our encoder; stop early
     * if a property name arrives without a matching value */
    for (i = 0; params[i] != NULL && params[i + 1] != NULL; i += 2) {
        gst_util_set_object_arg (G_OBJECT (encoder), params[i], params[i + 1]);
    }
    g_strfreev(params);
}
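A hypothetical call (assuming an x264enc instance); gst_util_set_object_arg parses the string form of enums, flags, and numbers, so each name=value pair works regardless of the property's type:

GstElement *enc = gst_element_factory_make ("x264enc", NULL);
gchar settings[] = "tune=zerolatency pass=quant";
set_encoder_settings (enc, settings);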
Example #3
GstElement * _owr_payload_create_encoder(OwrPayload *payload)
{
    GstElement *encoder = NULL;
    gchar *element_name = NULL;
    GstElementFactory *factory;
    const gchar *factory_name;

    g_return_val_if_fail(payload, NULL);

    switch (payload->priv->codec_type) {
    case OWR_CODEC_TYPE_H264:
        encoder = try_codecs(h264_encoders, "encoder");
        g_return_val_if_fail(encoder, NULL);

        factory = gst_element_get_factory(encoder);
        factory_name = gst_plugin_feature_get_name(factory);

        if (!strcmp(factory_name, "openh264enc")) {
            g_object_set(encoder, "gop-size", 0, NULL);
            gst_util_set_object_arg(G_OBJECT(encoder), "rate-control", "bitrate");
            g_object_bind_property(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE);
        } else if (!strcmp(factory_name, "x264enc")) {
            g_object_bind_property_full(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE,
                binding_transform_to_kbps, NULL, NULL, NULL);
            g_object_set(encoder, "tune", 0x04 /* zero-latency */, NULL);
        } else if (!strcmp(factory_name, "vtenc_h264")) {
            g_object_bind_property_full(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE,
                binding_transform_to_kbps, NULL, NULL, NULL);
        } else {
            /* Assume bits/s instead of kbit/s */
            g_object_bind_property(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE);
        }
        g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL);
        break;

    case OWR_CODEC_TYPE_VP8:
        encoder = try_codecs(vp8_encoders, "encoder");
        g_return_val_if_fail(encoder, NULL);
        g_object_set(encoder, "end-usage", 1, "deadline", G_GINT64_CONSTANT(1), "lag-in-frames", 0,
            "error-resilient", 1, "keyframe-mode", 0, NULL);
        g_object_bind_property(payload, "bitrate", encoder, "target-bitrate", G_BINDING_SYNC_CREATE);
        g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL);
        break;
    default:
        element_name = g_strdup_printf("encoder_%s_%u", OwrCodecTypeEncoderElementName[payload->priv->codec_type], get_unique_id());
        encoder = gst_element_factory_make(OwrCodecTypeEncoderElementName[payload->priv->codec_type], element_name);
        g_free(element_name);
        g_return_val_if_fail(encoder, NULL);
        break;
    }

    return encoder;
}
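The binding_transform_to_kbps helper is referenced above but not shown; a plausible sketch (an assumption: the payload's "bitrate" is a guint in bits/s, while x264enc and vtenc_h264 expect kbit/s):

static gboolean
binding_transform_to_kbps (GBinding *binding, const GValue *from_value,
    GValue *to_value, gpointer user_data)
{
    /* convert bits per second to kilobits per second */
    g_value_set_uint (to_value, g_value_get_uint (from_value) / 1000);
    return TRUE;
}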
Example #4
/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc */
void ReStream::media_configure_feature (GstRTSPMediaFactory * factory, GstRTSPMedia * media, gpointer user_data)
{
	try {
		GstElement *element, *appsrc;
		struct MyContext *ctx = (struct MyContext *)user_data;

		element = gst_rtsp_media_get_element (media);

		appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

		gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
		g_object_set (G_OBJECT (appsrc), "caps",
				gst_caps_new_simple ("video/x-raw",
						"format", G_TYPE_STRING, "BGR",
						"width", G_TYPE_INT, ctx->width,
						"height", G_TYPE_INT, ctx->height,
						"framerate", GST_TYPE_FRACTION, FPS, 1, NULL), NULL);

		//ctx->white = FALSE;
		ctx->timestamp = 0;
		ctx->buffer = gst_buffer_new_allocate (NULL, ctx->height * ctx->width * 3, NULL);
		gst_buffer_map (ctx->buffer, &ctx->map, GST_MAP_WRITE);

		/* make sure the data is freed when the media is gone */
		//g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx, (GDestroyNotify) g_free);

		g_signal_connect (appsrc, "need-data", (GCallback)need_data_feature, ctx);
		g_signal_connect (media, "unprepared", (GCallback)unprepared_feature, ctx);
		if (ISDEBUG)
			cout << "media_configure_feature: media prepared\n";
		gst_object_unref (appsrc);
		gst_object_unref (element);
	}
	catch(Exception &e){
		CommonClass localcommclass;
		localcommclass.PrintException("ReStream","CV::media_configure_feature",e);
	}
	catch(exception &e){
		CommonClass localcommclass;
		localcommclass.PrintException("ReStream","STD::media_configure_feature",e);
	}
}
/* this callback is called when playbin has constructed a source object to read
 * from. Since we provided the appsrc:// uri to playbin, this will be the
 * appsrc that we must handle. We set up some signals to push data into appsrc
 * and one to perform a seek. */
static void
found_source (GObject * object, GObject * orig, GParamSpec * pspec, App * app)
{
  /* get a handle to the appsrc */
  g_object_get (orig, pspec->name, &app->appsrc, NULL);

  GST_DEBUG ("got appsrc %p", app->appsrc);

  /* we can set the length in appsrc. This allows some elements to estimate the
   * total duration of the stream. It's a good idea to set the property when you
   * can but it's not required. */
  g_object_set (app->appsrc, "size", (gint64) app->length, NULL);
  gst_util_set_object_arg (G_OBJECT (app->appsrc), "stream-type",
      "random-access");

  /* configure the appsrc, we will push a buffer to appsrc when it needs more
   * data */
  g_signal_connect (app->appsrc, "need-data", G_CALLBACK (feed_data), app);
  g_signal_connect (app->appsrc, "seek-data", G_CALLBACK (seek_data), app);
}
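A sketch of how found_source is typically connected (assuming an App struct holding the playbin, as in the upstream appsrc examples):

/* have playbin notify us when it constructs the appsrc:// source */
g_signal_connect (app->playbin, "deep-notify::source",
    (GCallback) found_source, app);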
gboolean
gst_validate_monitor_setup (GstValidateMonitor * monitor)
{
  GList *config;

  GST_DEBUG_OBJECT (monitor, "Starting monitor setup");

  for (config = gst_validate_plugin_get_config (NULL); config;
      config = config->next) {
    const gchar *verbosity =
        gst_structure_get_string (GST_STRUCTURE (config->data),
        "verbosity");

    if (verbosity)
      gst_util_set_object_arg (G_OBJECT (monitor), "verbosity", verbosity);
  }

  /* For now we just need to do this at setup time */
  _determine_reporting_level (monitor);
  return GST_VALIDATE_MONITOR_GET_CLASS (monitor)->setup (monitor);
}
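The verbosity value originates from the validate configuration; a hypothetical example (assuming the standard GST_VALIDATE_CONFIG mechanism and that "all" is a valid verbosity nick):

/* e.g. run the application with:
 *   GST_VALIDATE_CONFIG="core, verbosity=all"
 * the loop above then applies the setting to each monitor at setup time */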
/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media,
    gpointer user_data)
{
  GstElement *element, *appsrc;
  MyContext *ctx;

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  //gst_element_set_base_time(GST_ELEMENT(element), 0);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
  /* configure the caps of the video */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "RGB16",
          "width", G_TYPE_INT, 384,
          "height", G_TYPE_INT, 288,
          "framerate", GST_TYPE_FRACTION, 0, 1, NULL), NULL);

  ctx = g_new0 (MyContext, 1);
  ctx->white = FALSE;
  ctx->timestamp = 0;
  /* make sure the data is freed when the media is gone */
  g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx,
      (GDestroyNotify) g_free);

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);
  gst_object_unref (appsrc);
  gst_object_unref (element);
}
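A sketch of how this callback is attached (following the upstream gst-rtsp-server appsrc example):

/* run media_configure each time a client triggers construction of
 * the media pipeline */
g_signal_connect (factory, "media-configure", (GCallback) media_configure,
    NULL);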
static void
empathy_video_widget_set_property (GObject *object,
  guint property_id, const GValue *value, GParamSpec *pspec)
{
  EmpathyVideoWidgetPriv *priv = GET_PRIV (object);

  switch (property_id)
    {
      case PROP_GST_BUS:
        priv->bus = g_value_dup_object (value);
        break;
      case PROP_MIN_WIDTH:
        priv->min_width = g_value_get_int (value);
        break;
      case PROP_MIN_HEIGHT:
        priv->min_height = g_value_get_int (value);
        break;
      case PROP_SYNC:
        priv->sync = g_value_get_boolean (value);
        empathy_video_widget_element_set_sink_properties (
          EMPATHY_VIDEO_WIDGET (object));
        break;
      case PROP_ASYNC:
        priv->async = g_value_get_boolean (value);
        empathy_video_widget_element_set_sink_properties (
          EMPATHY_VIDEO_WIDGET (object));
        break;
      case PROP_FLIP_VIDEO:
        priv->flip_video = g_value_get_boolean (value);
        gst_util_set_object_arg (G_OBJECT (priv->flip), "method",
            priv->flip_video ? "horizontal-flip" : "none");
        break;
      default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
    }
}
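gst_util_set_object_arg resolves the enum value from its string nick at runtime; a direct equivalent (a sketch, assuming videoflip's method enum, where "none" is 0 and "horizontal-flip" is 4):

/* equivalent to the string-based call above, using raw enum values */
g_object_set (priv->flip, "method", priv->flip_video ? 4 : 0, NULL);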
int
main (int argc, char **argv)
{
  GOptionEntry options[] = {
    {"effects", 'e', 0, G_OPTION_ARG_STRING, &opt_effects,
        "Effects to use (comma-separated list of element names)", NULL},
    {NULL}
  };
  GOptionContext *ctx;
  GError *err = NULL;
  GMainLoop *loop;
  GstElement *src, *q1, *q2, *effect, *filter1, *filter2, *sink;
  gchar **effect_names, **e;

  ctx = g_option_context_new ("");
  g_option_context_add_main_entries (ctx, options, GETTEXT_PACKAGE);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing: %s\n", err->message);
    g_option_context_free (ctx);
    g_clear_error (&err);
    return 1;
  }
  g_option_context_free (ctx);

  GST_FIXME ("Multiple things to check/fix, see source code");

  if (opt_effects != NULL)
    effect_names = g_strsplit (opt_effects, ",", -1);
  else
    effect_names = g_strsplit (DEFAULT_EFFECTS, ",", -1);

  for (e = effect_names; e != NULL && *e != NULL; ++e) {
    GstElement *el;

    el = gst_element_factory_make (*e, NULL);
    if (el) {
      g_print ("Adding effect '%s'\n", *e);
      g_queue_push_tail (&effects, el);
    }
  }

  pipeline = gst_pipeline_new ("pipeline");

  src = gst_element_factory_make ("videotestsrc", NULL);
  g_object_set (src, "is-live", TRUE, NULL);

  filter1 = gst_element_factory_make ("capsfilter", NULL);
  gst_util_set_object_arg (G_OBJECT (filter1), "caps",
      "video/x-raw, width=320, height=240, "
      "format={ I420, YV12, YUY2, UYVY, AYUV, Y41B, Y42B, "
      "YVYU, Y444, v210, v216, NV12, NV21, UYVP, A420, YUV9, YVU9, IYU1 }");

  q1 = gst_element_factory_make ("queue", NULL);

  blockpad = gst_element_get_static_pad (q1, "src");

  conv_before = gst_element_factory_make ("videoconvert", NULL);

  effect = g_queue_pop_head (&effects);
  cur_effect = effect;

  conv_after = gst_element_factory_make ("videoconvert", NULL);

  q2 = gst_element_factory_make ("queue", NULL);

  filter2 = gst_element_factory_make ("capsfilter", NULL);
  gst_util_set_object_arg (G_OBJECT (filter2), "caps",
      "video/x-raw, width=320, height=240, "
      "format={ RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR }");

  sink = gst_element_factory_make ("ximagesink", NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, filter1, q1, conv_before, effect,
      conv_after, q2, sink, NULL);

  gst_element_link_many (src, filter1, q1, conv_before, effect, conv_after,
      q2, sink, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  loop = g_main_loop_new (NULL, FALSE);

  gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, loop);

  g_timeout_add_seconds (1, timeout_cb, loop);

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
int start_streaming(rtsplink_t *vidout, const char *host, const int port) {
  // set up gstreamer
  GstStateChangeReturn ret;
  gboolean link_ok;
  if (!gst_initialized) {
    gst_init(NULL, NULL);
  }

  // create elements
  vidout->src = (void *)gst_element_factory_make("v4l2src", "src");
  vidout->enc = (void *)gst_element_factory_make("x264enc", "enc");
  vidout->mux = (void *)gst_element_factory_make("mpegtsmux", "mux");
  vidout->sink = (void *)gst_element_factory_make("tcpserversink", "sink");

  // modify the element's properties
  g_object_set((GstElement *)vidout->src, "device", "/dev/video0", NULL);
  gst_util_set_object_arg(G_OBJECT((GstElement *)vidout->enc), "tune", "zerolatency");
  gst_util_set_object_arg(G_OBJECT((GstElement *)vidout->enc), "pass", "quant");
  g_object_set((GstElement *)vidout->enc, "quantizer", 20, NULL);
  g_object_set((GstElement *)vidout->sink, "host", host, NULL);
  g_object_set((GstElement *)vidout->sink, "port", port, NULL);

  // create capabilities
  vidout->caps = (void *)gst_caps_new_simple("video/x-raw",
      "width", G_TYPE_INT, 640,
      "height", G_TYPE_INT, 480,
      NULL);

  // create pipeline
  printf("creating pipeline\n");
  vidout->pipeline = (void *)gst_pipeline_new("vidpipeline");
  if (!vidout->src ||
      !vidout->enc ||
      !vidout->mux ||
      !vidout->sink ||
      !vidout->pipeline) {
    g_printerr("not all elements created %p %p %p %p %p\n",
      vidout->src,
      vidout->enc,
      vidout->mux,
      vidout->sink,
      vidout->pipeline);
    memset(vidout, 0, sizeof(rtsplink_t)); // TODO
    return -1;
  }

  // build pipeline
  printf("building pipeline\n");
  gst_bin_add_many(
      GST_BIN((GstElement *)vidout->pipeline),
      (GstElement *)vidout->src,
      (GstElement *)vidout->enc,
      (GstElement *)vidout->mux,
      (GstElement *)vidout->sink,
      NULL);
  link_ok = gst_element_link_filtered(
      (GstElement *)vidout->src,
      (GstElement *)vidout->enc,
      (GstCaps *)vidout->caps);
  gst_caps_unref((GstCaps *)vidout->caps);
  if (link_ok != TRUE) {
    g_printerr("Source and encoder could not be linked\n");
    goto error;
  }
  link_ok = gst_element_link_many(
      (GstElement *)vidout->enc,
      (GstElement *)vidout->mux,
      (GstElement *)vidout->sink,
      NULL);
  if (link_ok != TRUE) {
    g_printerr("Encoder, mux, and sink could not be linked\n");
    goto error;
  }

  // start playing
  printf("playing\n");
  ret = gst_element_set_state((GstElement *)vidout->pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr("Unable to set the pipeline to the playing state\n");
    goto error;
  }
  return 0;

error:
  gst_object_unref((GstElement *)vidout->pipeline);
  memset(vidout, 0, sizeof(rtsplink_t));
  return -1;
}
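Hypothetical usage (assuming an rtsplink_t allocated by the caller; a TCP client can then read the MPEG-TS stream from the given port):

rtsplink_t vidout;
if (start_streaming (&vidout, "0.0.0.0", 5000) != 0)
  fprintf (stderr, "streaming failed to start\n");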
Example #11
GstElement * _owr_payload_create_encoder(OwrPayload *payload)
{
    GstElement *encoder = NULL;
    gchar *element_name = NULL;
    GstElementFactory *factory;
    const gchar *factory_name;
    gint cpu_used;

    g_return_val_if_fail(payload, NULL);

    switch (payload->priv->codec_type) {
    case OWR_CODEC_TYPE_H264:
        encoder = try_codecs(h264_encoders, "encoder");
        g_return_val_if_fail(encoder, NULL);

        factory = gst_element_get_factory(encoder);
        factory_name = gst_plugin_feature_get_name(factory);

        if (!strcmp(factory_name, "openh264enc")) {
            g_object_set(encoder, "gop-size", 0, NULL);
            gst_util_set_object_arg(G_OBJECT(encoder), "rate-control", "bitrate");
            gst_util_set_object_arg(G_OBJECT(encoder), "complexity", "low");
            g_object_bind_property(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE);
        } else if (!strcmp(factory_name, "x264enc")) {
            g_object_bind_property_full(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE,
                binding_transform_to_kbps, NULL, NULL, NULL);
            gst_util_set_object_arg(G_OBJECT(encoder), "speed-preset", "ultrafast");
            gst_util_set_object_arg(G_OBJECT(encoder), "tune", "fastdecode+zerolatency");
        } else if (!strcmp(factory_name, "vtenc_h264")) {
            g_object_bind_property_full(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE,
                binding_transform_to_kbps, NULL, NULL, NULL);
            g_object_set(encoder,
                "allow-frame-reordering", FALSE,
                "realtime", TRUE,
#if defined(__APPLE__) && TARGET_OS_IPHONE
                "quality", 0.0,
#else
                "quality", 0.5,
#endif
                "max-keyframe-interval", G_MAXINT,
                NULL);
        } else if (!strcmp(factory_name, "omxh264enc")) {
            g_object_set(encoder, "control-rate", 2, NULL);
            g_object_bind_property(payload, "bitrate", encoder, "target-bitrate", G_BINDING_SYNC_CREATE);
        } else {
            /* Assume bits/s instead of kbit/s */
            g_object_bind_property(payload, "bitrate", encoder, "bitrate", G_BINDING_SYNC_CREATE);
        }
        g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL);
        //make_preset = TRUE;
        break;

    case OWR_CODEC_TYPE_VP8:
        encoder = try_codecs(vp8_encoders, "encoder");
        g_return_val_if_fail(encoder, NULL);

        factory = gst_element_get_factory(encoder);
        factory_name = gst_plugin_feature_get_name(factory);
#if (defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR) || defined(__ANDROID__)
        cpu_used = -12; /* Mobile */
#else
        cpu_used = -6; /* Desktop */
#endif
        if (!strcmp(factory_name, "omxvp8enc")) {
            g_object_set(encoder, "control-rate", 2, NULL);
        } else { /* vp8enc */
            /* values are inspired by webrtc.org values in vp8_impl.cc */
            g_object_set(encoder,
                "end-usage", 1, /* VPX_CBR */
                "deadline", G_GINT64_CONSTANT(1), /* VPX_DL_REALTIME */
                "cpu-used", cpu_used,
                "min-quantizer", 2,
                "buffer-initial-size", 500,
                "buffer-optimal-size", 600,
                "buffer-size", 1000,
                "lag-in-frames", 0,
                "timebase", 1, 90000,
                "error-resilient", 1,
                "keyframe-mode", 0, /* VPX_KF_DISABLED */
                NULL);
        }

        g_object_bind_property(payload, "bitrate", encoder, "target-bitrate", G_BINDING_SYNC_CREATE);
        g_object_set(payload, "bitrate", evaluate_bitrate_from_payload(payload), NULL);
        //make_preset = TRUE;
        break;
    default:
        element_name = g_strdup_printf("encoder_%s_%u", OwrCodecTypeEncoderElementName[payload->priv->codec_type], get_unique_id());
        encoder = gst_element_factory_make(OwrCodecTypeEncoderElementName[payload->priv->codec_type], element_name);
        g_free(element_name);
        g_return_val_if_fail(encoder, NULL);
        break;
    }

    return encoder;
}
int
main (int argc, char **argv)
{
  GOptionEntry options[] = {
    {NULL}
  };
  GOptionContext *ctx;
  GError *err = NULL;
  GstElement *src, *q, *capsfilter, *sink;
  GstElement *pipeline;
  GstPad *src_pad;
  GstCaps *filter_caps;

  ctx = g_option_context_new ("");
  g_option_context_add_main_entries (ctx, options, GETTEXT_PACKAGE);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing: %s\n", err->message);
    return 1;
  }
  g_option_context_free (ctx);

  logo_buf = create_overlay_buffer ();

  main_loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("pipeline");

  src = gst_element_factory_make ("videotestsrc", NULL);
  gst_util_set_object_arg (G_OBJECT (src), "pattern", "white");

  src_pad = gst_element_get_static_pad (src, "src");
  gst_pad_add_probe (src_pad, GST_PAD_PROBE_TYPE_BUFFER, buffer_cb,
      main_loop, NULL);
  gst_object_unref (src_pad);

  q = gst_element_factory_make ("queue", NULL);

  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  filter_caps = gst_caps_from_string ("video/x-raw, format = "
      GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS);
  gst_caps_set_simple (filter_caps,
      "width", G_TYPE_INT, VIDEO_WIDTH,
      "height", G_TYPE_INT, VIDEO_HEIGHT,
      "framerate", GST_TYPE_FRACTION, VIDEO_FPS, 1, NULL);
  g_object_set (capsfilter, "caps", filter_caps, NULL);
  gst_caps_unref (filter_caps);

  sink = gst_element_factory_make ("ximagesink", NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, q, capsfilter, sink, NULL);

  gst_element_link_many (src, q, capsfilter, sink, NULL);

  count = 0;

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, main_loop);

  g_main_loop_run (main_loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_buffer_unref (logo_buf);

  return 0;
}