Example #1
static void
handle_set_media_message (SnraClient * client, GstStructure * s)
{
  const gchar *protocol, *path;
  int port;
  GstClockTime base_time;
  gint64 tmp;
  gchar *uri;
  gboolean paused;

  protocol = gst_structure_get_string (s, "resource-protocol");
  path = gst_structure_get_string (s, "resource-path");

  if (protocol == NULL || path == NULL)
    return;                     /* Invalid message */

  if (!snra_json_structure_get_int (s, "resource-port", &port))
    return;                     /* Invalid message */

  if (!snra_json_structure_get_int64 (s, "base-time", &tmp))
    return;                     /* Invalid message */

  if (!snra_json_structure_get_boolean (s, "paused", &paused))
    return;                     /* Invalid message */

  base_time = (GstClockTime) (tmp);

  if (client->player == NULL) {
    construct_player (client);
    if (client->player == NULL)
      return;
  } else {
    gst_element_set_state (client->player, GST_STATE_NULL);
  }

  uri =
      g_strdup_printf ("%s://%s:%d%s", protocol, client->connected_server, port,
      path);
  g_print ("Playing URI %s base_time %" GST_TIME_FORMAT "\n", uri,
      GST_TIME_ARGS (base_time));
  g_object_set (client->player, "uri", uri, NULL);
  g_free (uri);

  gst_element_set_start_time (client->player, GST_CLOCK_TIME_NONE);
  gst_element_set_base_time (client->player, base_time);
  gst_pipeline_use_clock (GST_PIPELINE (client->player), client->net_clock);

  if (client->enabled) {
    if (paused)
      client->state = GST_STATE_PAUSED;
    else
      client->state = GST_STATE_PLAYING;
  } else {
    client->state = DISABLED_STATE;
  }

  gst_element_set_state (client->player, client->state);
}
Example #2
void cmd_do_clock(gchar **arg_strv, gint arg_num)
{
  g_print ("%s %s %s %s\n", __func__, arg_strv[0], arg_strv[1], arg_strv[2]);
  client_clock = gst_net_client_clock_new (NULL, arg_strv[1], atoi(arg_strv[2]), 0);
  g_usleep (G_USEC_PER_SEC / 2);

  gst_pipeline_use_clock (GST_PIPELINE (gst_data.playbin), client_clock);
  //gst_element_set_start_time (gst_data.playbin, GST_CLOCK_TIME_NONE);
  //gst_pipeline_set_latency (GST_PIPELINE (gst_data.playbin), GST_SECOND / 2);
}
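The client clock created in cmd_do_clock() only synchronises against a server that publishes its clock on the network. A minimal sketch of that server side, assuming the system clock and port 8554 (both placeholders, not taken from the example):

/* Sketch: publish a clock so that gst_net_client_clock_new() clients,
 * like the one above, can slave to it. */
GstClock *server_clock = gst_system_clock_obtain ();
GstNetTimeProvider *provider =
    gst_net_time_provider_new (server_clock, NULL, 8554);
/* Keep `provider` alive for as long as clients may connect. */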
Example #3
static void
set_media (AurClient * client)
{
  if (client->player == NULL) {
    construct_player (client);
    if (client->player == NULL)
      return;
  }

  gst_element_set_state (client->player, GST_STATE_READY);

  g_print ("Setting media URI %s base_time %" GST_TIME_FORMAT " position %"
      GST_TIME_FORMAT " paused %i\n", client->uri,
      GST_TIME_ARGS (client->base_time), GST_TIME_ARGS (client->position),
      client->paused);
  g_object_set (client->player, "uri", client->uri, NULL);

  gst_element_set_start_time (client->player, GST_CLOCK_TIME_NONE);
  gst_pipeline_use_clock (GST_PIPELINE (client->player), client->net_clock);

  /* Do the preroll */
  gst_element_set_state (client->player, GST_STATE_PAUSED);
  gst_element_get_state (client->player, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* Compensate preroll time if playing */
  if (!client->paused) {
    GstClockTime now = gst_clock_get_time (client->net_clock);
    if (now > (client->base_time + client->position))
      client->position = now - client->base_time;
  }

  /* If position is off by more than 0.5 sec, seek to that position
   * (otherwise, just let the player skip) */
  if (client->position > GST_SECOND/2) {
    /* FIXME Query duration, so we don't seek after EOS */
    if (!gst_element_seek_simple (client->player, GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, client->position)) {
      g_warning ("Initial seekd failed, player will go faster instead");
      client->position = 0;
    }
  }

  /* Set base time considering seek position after seek */
  gst_element_set_base_time (client->player,
      client->base_time + client->position);

  /* Before we start playing, ensure we have selected the right audio track */
  set_language (client);

  if (!client->paused)
    gst_element_set_state (client->player, GST_STATE_PLAYING);
}
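The base-time handling in set_media() is plain clock arithmetic: running time is clock time minus base time, so after the optional seek the client re-derives the base time such that the stream position maps onto the shared timeline. Reduced to one hedged line (shared_base_time and position stand in for the client fields above):

/* running_time = clock_time - base_time, so picking
 * base_time = shared_base_time + position makes `position`
 * line up exactly with the current shared running time. */
gst_element_set_base_time (player, shared_base_time + position);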
Example #4
static void
gst_uri_transcode_bin_constructed (GObject * object)
{
#if HAVE_GETRUSAGE
  GstUriTranscodeBin *self = GST_URI_TRANSCODE_BIN (object);

  self->cpu_clock =
      GST_CLOCK (gst_cpu_throttling_clock_new (self->wanted_cpu_usage));
  gst_pipeline_use_clock (GST_PIPELINE (self), self->cpu_clock);
#endif

  ((GObjectClass *) parent_class)->constructed (object);
}
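The throttling clock above paces the pipeline according to self->wanted_cpu_usage. With the stock uritranscodebin from gst-plugins-bad this is normally driven through its cpu-usage property; a hedged sketch (the property name is an assumption here, verify with gst-inspect-1.0):

GstElement *transcoder = gst_element_factory_make ("uritranscodebin", NULL);
/* Ask the CPU-throttling clock to keep the transcode near 50% load. */
g_object_set (transcoder, "cpu-usage", 50, NULL);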
Example #5
static GstElement *
create_pipeline (GstRTSPMediaFactory * factory, GstRTSPMedia * media)
{
  GstElement *pipeline;

  pipeline = gst_pipeline_new ("media-pipeline");
  if (global_clock == NULL) {
    g_print ("No clock!!!!\n");
  } else {
    gst_pipeline_use_clock (GST_PIPELINE (pipeline), global_clock);
  }
  gst_rtsp_media_take_pipeline (media, GST_PIPELINE_CAST (pipeline));

  return pipeline;
}
Example #6
MediaPipelineImpl::MediaPipelineImpl (const boost::property_tree::ptree &config)
  : MediaObjectImpl (config)
{
  GstClock *clock;

  pipeline = gst_pipeline_new (NULL);

  if (pipeline == NULL) {
    throw KurentoException (MEDIA_OBJECT_NOT_AVAILABLE,
                            "Cannot create gstreamer pipeline");
  }

  clock = gst_system_clock_obtain ();
  gst_pipeline_use_clock (GST_PIPELINE (pipeline), clock);
  g_object_unref (clock);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  busMessageHandler = 0;
}
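By default a GstPipeline selects a clock from its elements on each transition to PLAYING; gst_pipeline_use_clock() pins one clock for the pipeline's whole lifetime, which is why the constructor above can rely on the system clock never being swapped out. A minimal sketch of the inverse operation:

/* Undo a pinned clock and let the pipeline pick one again
 * on the next transition to PLAYING. */
gst_pipeline_auto_clock (GST_PIPELINE (pipeline));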
Example #7
int main(int argc, char *argv[]) {
  GMainLoop *main_loop;
  GstClock *client_clock, *tmp_clock;
  GstNetTimeProvider *prov_clock;
  guint16 clock_port;
  GstClockTime base_time;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  prov_clock = create_net_clock (&clock_port);
  client_clock = gst_net_client_clock_new (NULL, "127.0.0.1", clock_port, 0);

  /* Wait 0.5 seconds for the clock to stabilise */
  g_usleep (G_USEC_PER_SEC / 2);
  base_time = share_base_time (clock_port, prov_clock);

  /* Create the elements */
  playbin = gst_element_factory_make ("playbin", "playbin");
  g_object_set (playbin, "uri", "file:///home/luisbg/samples/big_buck_bunny_1080p_h264.mov", NULL);

  gst_pipeline_use_clock (GST_PIPELINE (playbin), client_clock);
  gst_element_set_base_time (playbin, base_time);
  gst_element_set_start_time (playbin, GST_CLOCK_TIME_NONE);
  gst_pipeline_set_latency (GST_PIPELINE (playbin), GST_SECOND / 2);

  gst_element_set_state (playbin, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run */
  main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);
  return 0;
}
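Both this example and cmd_do_clock() above sleep for half a second because gst_net_client_clock_new() returns before the clock has actually synchronised with the provider. Since GStreamer 1.6 the wait can be made deterministic; a short sketch with an assumed ten-second timeout:

/* Block until the network clock reports it is synchronised. */
if (!gst_clock_wait_for_sync (client_clock, 10 * GST_SECOND))
  g_warning ("Network clock did not synchronise in time");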
Example #8
static void owr_media_renderer_init(OwrMediaRenderer *renderer)
{
    OwrMediaRendererPrivate *priv;
    GstBus *bus;
    GSource *bus_source;
    gchar *bin_name;

    renderer->priv = priv = OWR_MEDIA_RENDERER_GET_PRIVATE(renderer);

    priv->media_type = DEFAULT_MEDIA_TYPE;
    priv->source = DEFAULT_SOURCE;
    priv->disabled = DEFAULT_DISABLED;

    priv->message_origin_bus_set = owr_message_origin_bus_set_new();

    bin_name = g_strdup_printf("media-renderer-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->pipeline = gst_pipeline_new(bin_name);
    gst_pipeline_use_clock(GST_PIPELINE(priv->pipeline), gst_system_clock_obtain());
    gst_element_set_base_time(priv->pipeline, _owr_get_base_time());
    gst_element_set_start_time(priv->pipeline, GST_CLOCK_TIME_NONE);
    g_free(bin_name);

#ifdef OWR_DEBUG
    g_signal_connect(priv->pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

    priv->sink = NULL;
    priv->src = NULL;

    bus = gst_pipeline_get_bus(GST_PIPELINE(priv->pipeline));
    bus_source = gst_bus_create_watch(bus);
    g_source_set_callback(bus_source, (GSourceFunc) bus_call, priv->pipeline, NULL);
    g_source_attach(bus_source, _owr_get_main_context());
    g_source_unref(bus_source);

    g_mutex_init(&priv->media_renderer_lock);
}
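The three calls in the middle of this init function are a recurring OpenWebRTC pattern: every otherwise independent pipeline is pinned to the same clock and base time, and start-time tracking is disabled so that state changes never shift running time. Stripped to its core (shared_clock and shared_base_time are placeholders for the values obtained above):

gst_pipeline_use_clock (GST_PIPELINE (pipeline), shared_clock);  /* same clock everywhere */
gst_element_set_base_time (pipeline, shared_base_time);          /* same time origin */
gst_element_set_start_time (pipeline, GST_CLOCK_TIME_NONE);      /* never reset on pause */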
Example #9
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *                                                             +------------+
 *                                                         /---+ inter*sink |
 * +--------+    +--------+   +------------+   +-----+    /    +------------+
 * | source +----+ scale? +---+ capsfilter +---+ tee +---/
 * +--------+    +--------+   +------------+   +-----+   \
 *                                                        \    +------------+
 *                                                         \---+ inter*sink |
 *                                                             +------------+
 *
 * For each newly requested pad a new inter*sink is added to the tee.
 * Note that this is a completely independent pipeline, and the complete
 * pipeline is only created once for a specific media source.
 *
 * Then for each newly requested pad another bin with a inter*src is
 * created, which is then going to be part of the transport agent
 * pipeline. The ghostpad of it is what we return here.
 *
 * +-----------+   +-------------------------------+   +----------+
 * | inter*src +---+ converters/queues/capsfilters +---+ ghostpad |
 * +-----------+   +-------------------------------+   +----------+
 *
 */
static GstElement *owr_local_media_source_request_source(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_element = NULL;
    GstElement *source_pipeline;
    GHashTable *event_data;
    GValue *value;
#if defined(__linux__) && !defined(__ANDROID__)
    gchar *tmp;
#endif

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    /* only create the source bin for this media source once */
    if ((source_pipeline = _owr_media_source_get_source_bin(media_source)))
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
    else {
        OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
        OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
        GstElement *source, *source_process = NULL, *capsfilter = NULL, *tee;
        GstPad *sinkpad, *source_pad;
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstBus *bus;
        GSource *bus_source;

        event_data = _owr_value_table_new();
        value = _owr_value_table_add(event_data, "start_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());

        g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_pipeline = gst_pipeline_new(bin_name);
        gst_pipeline_use_clock(GST_PIPELINE(source_pipeline), gst_system_clock_obtain());
        gst_element_set_base_time(source_pipeline, _owr_get_base_time());
        gst_element_set_start_time(source_pipeline, GST_CLOCK_TIME_NONE);
        g_free(bin_name);
        bin_name = NULL;

#ifdef OWR_DEBUG
        g_signal_connect(source_pipeline, "deep-notify", G_CALLBACK(_owr_deep_notify), NULL);
#endif

        bus = gst_pipeline_get_bus(GST_PIPELINE(source_pipeline));
        bus_source = gst_bus_create_watch(bus);
        g_source_set_callback(bus_source, (GSourceFunc) bus_call, media_source, NULL);
        g_source_attach(bus_source, _owr_get_main_context());
        g_source_unref(bus_source);

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
            {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
/*
    Default values for buffer-time and latency-time on android are 200ms and 20ms.
    The minimum latency-time that can be used on Android is 20ms, and using
    a 40ms buffer-time with a 20ms latency-time causes crackling audio.
    So let's just stick with the defaults.
*/
#if !defined(__ANDROID__)
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#endif
                if (priv->device_index > -1) {
#ifdef __APPLE__
                    g_object_set(source, "device", priv->device_index, NULL);
#elif defined(__linux__) && !defined(__ANDROID__)
                    tmp = g_strdup_printf("%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            break;
            }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            GstPad *srcpad;
            GstCaps *device_caps;

            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
                if (priv->device_index > -1) {
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                    g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                    g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                    tmp = g_strdup_printf("/dev/video%d", priv->device_index);
                    g_object_set(source, "device", tmp, NULL);
                    g_free(tmp);
#endif
                }
                break;
            case OWR_SOURCE_TYPE_TEST: {
                GstElement *src, *time;
                GstPad *srcpad;

                source = gst_bin_new("video-source");

                CREATE_ELEMENT(src, "videotestsrc", "videotestsrc");
                g_object_set(src, "is-live", TRUE, NULL);
                gst_bin_add(GST_BIN(source), src);

                time = gst_element_factory_make("timeoverlay", "timeoverlay");
                if (time) {
                    g_object_set(time, "font-desc", "Sans 60", NULL);
                    gst_bin_add(GST_BIN(source), time);
                    gst_element_link(src, time);
                    srcpad = gst_element_get_static_pad(time, "src");
                } else
                    srcpad = gst_element_get_static_pad(src, "src");

                gst_element_add_pad(source, gst_ghost_pad_new("src", srcpad));
                gst_object_unref(srcpad);

                break;
            }
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            /* First try to see if we can just get the format we want directly */

            source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
            gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#else
            _owr_gst_caps_foreach(caps, fix_video_caps_framerate, source_caps);
#endif
            /* Now see what the device can really produce */
            srcpad = gst_element_get_static_pad(source, "src");
            gst_element_set_state(source, GST_STATE_READY);
            device_caps = gst_pad_query_caps(srcpad, source_caps);

            if (gst_caps_is_empty(device_caps)) {
                /* Let's see if it works when we drop format constraints (which can be dealt with downstream) */
                GstCaps *tmp = source_caps;
                source_caps = gst_caps_new_empty();
#if GST_CHECK_VERSION(1, 5, 0)
                gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#else
                _owr_gst_caps_foreach(tmp, fix_video_caps_format, source_caps);
#endif
                gst_caps_unref(tmp);

                gst_caps_unref(device_caps);
                device_caps = gst_pad_query_caps(srcpad, source_caps);

                if (gst_caps_is_empty(device_caps)) {
                    /* Accepting any format didn't work, we're going to hope that scaling fixes it */
                    CREATE_ELEMENT(source_process, "videoscale", "video-source-scale");
                    gst_bin_add(GST_BIN(source_pipeline), source_process);
                }
            }

            gst_caps_unref(device_caps);
            gst_object_unref(srcpad);

#if defined(__APPLE__) && TARGET_OS_IPHONE && !TARGET_IPHONE_SIMULATOR
            /* Force NV12 on iOS else the source can negotiate BGRA
             * ercolorspace can do NV12 -> BGRA and NV12 -> I420 which is what
             * is needed for Bowser */
            gst_caps_set_simple(source_caps, "format", G_TYPE_STRING, "NV12", NULL);
#endif

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_pipeline), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        source_pad = gst_element_get_static_pad(source, "src");
        g_signal_connect(source_pad, "notify::caps", G_CALLBACK(on_caps), media_source);
        gst_object_unref(source_pad);

        CREATE_ELEMENT(tee, "tee", "source-tee");
        g_object_set(tee, "allow-not-linked", TRUE, NULL);

        gst_bin_add_many(GST_BIN(source_pipeline), source, tee, NULL);

        /* Many sources don't like reconfiguration and it's pointless
         * here anyway right now. No need to reconfigure whenever something
         * is added to the tee or removed.
         * We will have to implement reconfiguration differently later by
         * selecting the best caps based on all consumers.
         */
        sinkpad = gst_element_get_static_pad(tee, "sink");
        gst_pad_add_probe(sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, drop_reconfigure_event, NULL, NULL);
        gst_object_unref(sinkpad);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            if (source_process) {
                LINK_ELEMENTS(source_process, capsfilter);
                LINK_ELEMENTS(source, source_process);
            } else
                LINK_ELEMENTS(source, capsfilter);
        } else if (source_process) {
            LINK_ELEMENTS(source_process, tee);
            LINK_ELEMENTS(source, source_process);
        } else
            LINK_ELEMENTS(source, tee);

        gst_element_sync_state_with_parent(tee);
        if (capsfilter)
            gst_element_sync_state_with_parent(capsfilter);
        if (source_process)
            gst_element_sync_state_with_parent(source_process);
        gst_element_sync_state_with_parent(source);

        _owr_media_source_set_source_bin(media_source, source_pipeline);
        _owr_media_source_set_source_tee(media_source, tee);
        if (gst_element_set_state(source_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
            GST_ERROR("Failed to set local source pipeline %s to playing", GST_OBJECT_NAME(source_pipeline));
            /* FIXME: We should handle this and don't expose the source */
        }

        value = _owr_value_table_add(event_data, "end_time", G_TYPE_INT64);
        g_value_set_int64(value, g_get_monotonic_time());
        OWR_POST_EVENT(media_source, LOCAL_SOURCE_STARTED, event_data);

        g_signal_connect(tee, "pad-removed", G_CALLBACK(tee_pad_removed_cb), media_source);
    }
    gst_object_unref(source_pipeline);

    source_element = OWR_MEDIA_SOURCE_CLASS(owr_local_media_source_parent_class)->request_source(media_source, caps);

done:
    return source_element;
}
Example #10
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters are not
    // specified, let's set them manually to prevent very huge images
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v42lsrc = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v42lsrc = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v42lsrc == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link elements
    if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v42lsrc);

        return false;
    }
    gst_caps_unref(caps_v42lsrc);

    // link elements
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    int width, height;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<float>(num) / denom;
    end = false;

    return true;
}
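Several of the NVXIO examples in this listing pass NULL as the clock. That is not a fallback value: a NULL clock disables clock distribution entirely, so sinks stop waiting for buffer timestamps and render as fast as data arrives, which is exactly what the "as fast as possible" comments mean.

/* No clock: synchronisation is disabled for the whole pipeline. */
gst_pipeline_use_clock (pipeline, NULL);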
Example #11
/*!
 * \brief GstShow::open
 * \param xwinid id of the window to render into
 * \param Laddress URI or file path of the left stream
 * \param Raddress URI or file path of the right stream
 * \return success
 */
bool GstShow::open(int xwinid, std::string Laddress, std::string Raddress)
{
    
    // init gstreamer
    gst_initializer::init();

    // init vars
    //int  bufsize = 0;

    bool file = false;
    char *uriL = NULL;
    char *uriR = NULL;
    
    init_pipeline(xwinid);
    
    if(!gst_uri_is_valid(Laddress.c_str()) || !gst_uri_is_valid(Raddress.c_str()))
    {
        uriL = realpath(Laddress.c_str(), NULL);
        uriR = realpath(Raddress.c_str(), NULL);
        if(uriL != NULL && uriR != NULL)
        {
            uriL = g_filename_to_uri(uriL, NULL, NULL);
            uriR = g_filename_to_uri(uriR, NULL, NULL);
            if(uriL != NULL && uriR != NULL)
            {
                file = true;
            }
            else
            {
                L_(lerror) << "GStreamer: Error opening file";
                close();
                return false;
            }
        }
    } else {
        file = false;
        uriL = g_strdup(Laddress.c_str());        
        uriR = g_strdup(Raddress.c_str());        
    }
    if (!file)
    {
        global_clock = gst_system_clock_obtain ();
        gst_net_time_provider_new (global_clock, "0.0.0.0", 8554);
        if (global_clock != NULL)
            L_(ldebug2) << ("Clock created!");
        else
        {
            L_(lerror) << ("Could not create clock!");
            return false;
        }
        gst_pipeline_use_clock (GST_PIPELINE (pipeline), global_clock);
    }
        
    
    for (int i = 0; i<2; i++)
    {
        source[i] = gst_element_factory_make("uridecodebin", NULL);

        gst_bin_add_many(GST_BIN(pipeline), source[i], NULL);
        
        if (i==0)
            g_object_set (source[i], "uri", uriL, NULL);
        if (i==1)
            g_object_set (source[i], "uri", uriR, NULL);
            
        //sync if not file!
        if (!file)
        {
            g_signal_connect (source[i], "source-setup", G_CALLBACK (source_created), NULL);
        }
        
        //Set latency if we have a stream
        if (!file)
        {
            /* Set this high enough so that it's higher than the minimum latency
            * on all receivers */
            gst_pipeline_set_latency (GST_PIPELINE (pipeline), PIPELINE_LATENCY_MS * GST_MSECOND);
        }        
        
        g_signal_connect(source[i], "pad-added", G_CALLBACK(newPad), queue[i]);
        
    }

    
    return finish_pipeline();
}
Example #12
bool GStreamerNvCameraFrameSourceImpl::InitializeGstPipeLine()
{
    // select config with max FPS value to be default
    NvCameraConfigs nvcameraconfig = configs[2];

    // use user specified camera config
    if ( (configuration.frameWidth != (vx_uint32)-1) &&
         (configuration.frameHeight != (vx_uint32)-1) )
    {
        nvcameraconfig.frameWidth = configuration.frameWidth;
        nvcameraconfig.frameHeight = configuration.frameHeight;
        nvcameraconfig.fps = 30;

        // select FPS default for the specified config
        for (vx_size i = 0; i < dimOf(configs); ++i)
        {
            if ((nvcameraconfig.frameWidth == configs[i].frameWidth) &&
                (nvcameraconfig.frameHeight == configs[i].frameHeight))
            {
                nvcameraconfig.fps = configs[i].fps;
                break;
            }
        }
    }

    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = nvcameraconfig.fps;

    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create nvcamerasrc
    GstElement * nvcamerasrc = gst_element_factory_make("nvcamerasrc", NULL);
    if (nvcamerasrc == NULL)
    {
        NVXIO_PRINT("Cannot create nvcamerasrc");
        NVXIO_PRINT("\"nvcamerasrc\" element is not available on this platform");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream stream;
    stream << configuration.fps << " " << configuration.fps;
    std::string fpsRange = stream.str();

    g_object_set(G_OBJECT(nvcamerasrc), "sensor-id", cameraIdx, NULL);
    g_object_set(G_OBJECT(nvcamerasrc), "fpsRange", fpsRange.c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), nvcamerasrc);

    // create nvvideosink element
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", NULL);
    if (nvvideosink == NULL)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(nvvideosink), "display", context.display, NULL);
    g_object_set(G_OBJECT(nvvideosink), "stream", context.stream, NULL);
    g_object_set(G_OBJECT(nvvideosink), "fifo", fifoMode, NULL);
    g_object_set(G_OBJECT(nvvideosink), "max-lateness", -1, NULL);
    g_object_set(G_OBJECT(nvvideosink), "throttle-time", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "render-delay", 0, NULL);
    g_object_set(G_OBJECT(nvvideosink), "qos", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "sync", FALSE, NULL);
    g_object_set(G_OBJECT(nvvideosink), "async", TRUE, NULL);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

    // link elements
    stream.str(std::string());
    stream << "video/x-raw(memory:NVMM), width=(int)" << nvcameraconfig.frameWidth << ", "
              "height=(int)" << nvcameraconfig.frameHeight << ", format=(string){I420}, "
              "framerate=(fraction)" << nvcameraconfig.fps << "/1;";

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string(stream.str().c_str()));

    if (!caps_nvvidconv)
    {
        NVXIO_PRINT("Failed to create caps");
        FinalizeGstPipeLine();

        return false;
    }

    if (!gst_element_link_filtered(nvcamerasrc, nvvideosink, caps_nvvidconv.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvvidconv -> nvvideosink using caps");
        FinalizeGstPipeLine();

        return false;
    }

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();

        return false;
    }

    vx_uint32 initialFPS = configuration.fps;

    if (!updateConfiguration(nvcamerasrc, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    // if initialFPS is specified, we should use this, because
    // retrieved via the updateConfiguration function FPS corresponds
    // to camera config FPS
    if (initialFPS != (vx_uint32)-1)
        configuration.fps = initialFPS;

    end = false;

    return true;
}
Example #13
bool GStreamerOpenMAXFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    std::string uri;
    if (!gst_uri_is_valid(fileName.c_str()))
    {
        char* real = realpath(fileName.c_str(), NULL);

        if (!real)
        {
            NVXIO_PRINT("Can't resolve path \"%s\": %s", fileName.c_str(), strerror(errno));
            return false;
        }

        std::unique_ptr<char[], GlibDeleter> pUri(g_filename_to_uri(real, NULL, NULL));
        free(real);
        uri = pUri.get();
    }
    else
    {
        uri = fileName;
    }

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create uridecodebin
    GstBin * uriDecodeBin = GST_BIN(gst_element_factory_make("uridecodebin", NULL));
    if (uriDecodeBin == NULL)
    {
        NVXIO_PRINT("Cannot create uridecodebin");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(uriDecodeBin), "uri", uri.c_str(), NULL);
    g_object_set(G_OBJECT(uriDecodeBin), "message-forward", TRUE, NULL);

    gst_bin_add(GST_BIN(pipeline), GST_ELEMENT(uriDecodeBin));

    // create nvvidconv
    GstElement * nvvidconv = gst_element_factory_make("nvvidconv", NULL);
    if (nvvidconv == NULL)
    {
        NVXIO_PRINT("Cannot create nvvidconv");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), nvvidconv);

    // create nvvideosink element
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", NULL);
    if (nvvideosink == NULL)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();

        return false;
    }

    g_object_set(G_OBJECT(nvvideosink), "display", context.display, NULL);
    g_object_set(G_OBJECT(nvvideosink), "stream", context.stream, NULL);
    g_object_set(G_OBJECT(nvvideosink), "fifo", fifoMode, NULL);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

    g_signal_connect(uriDecodeBin, "autoplug-select", G_CALLBACK(GStreamerOpenMAXFrameSourceImpl::autoPlugSelect), NULL);
    g_signal_connect(uriDecodeBin, "pad-added", G_CALLBACK(GStreamerBaseFrameSourceImpl::newGstreamerPad), nvvidconv);

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(
        gst_caps_from_string("video/x-raw(memory:NVMM), format=(string){I420}"));

    // link nvvidconv using caps
    if (!gst_element_link_filtered(nvvidconv, nvvideosink, caps_nvvidconv.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvvidconv -> nvvideosink");
        FinalizeGstPipeLine();

        return false;
    }

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

    handleGStreamerMessages();
    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();

        return false;
    }

    // GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "gst_pipeline");

    if (!updateConfiguration(nvvidconv, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    end = false;

    return true;
}
Example #14
bool nvxio::GStreamerVideoRenderImpl::InitializeGStreamerPipeline()
{
    std::ostringstream stream;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create appsrc
    GstElement * appsrcelem = gst_element_factory_make("appsrc", NULL);
    if (appsrcelem == NULL)
    {
        NVXIO_PRINT("Cannot create appsrc");
        FinalizeGStreamerPipeline();

        return false;
    }

    g_object_set(G_OBJECT(appsrcelem), "is-live", 0, NULL);
    g_object_set(G_OBJECT(appsrcelem), "num-buffers", -1, NULL);
    g_object_set(G_OBJECT(appsrcelem), "emit-signals", 0, NULL);
    g_object_set(G_OBJECT(appsrcelem), "block", 1, NULL);
    g_object_set(G_OBJECT(appsrcelem), "size", static_cast<guint64>(wndHeight_ * wndWidth_ * 4), NULL);
    g_object_set(G_OBJECT(appsrcelem), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(appsrcelem), "stream-type", GST_APP_STREAM_TYPE_STREAM, NULL);

    appsrc = GST_APP_SRC_CAST(appsrcelem);
#if GST_VERSION_MAJOR == 0
    GstCaps * caps = gst_caps_new_simple("video/x-raw-rgb",
                                         "bpp", G_TYPE_INT, 32,
                                         "endianness", G_TYPE_INT, 4321,
                                         "red_mask", G_TYPE_INT, -16777216,
                                         "green_mask", G_TYPE_INT, 16711680,
                                         "blue_mask", G_TYPE_INT, 65280,
                                         "alpha_mask", G_TYPE_INT, 255,
                                         "width", G_TYPE_INT, wndWidth_,
                                         "height", G_TYPE_INT, wndHeight_,
                                         "framerate", GST_TYPE_FRACTION, GSTREAMER_DEFAULT_FPS, 1,
                                         NULL);
    if (caps == NULL)
    {
        NVXIO_PRINT("Failed to create caps");
        FinalizeGStreamerPipeline();

        return false;
    }

#else
    // support 4 channel 8 bit data
    stream << "video/x-raw"
           << ", width=" << wndWidth_
           << ", height=" << wndHeight_
           << ", format=(string){RGBA}"
           << ", framerate=" << GSTREAMER_DEFAULT_FPS << "/1;";
    GstCaps * caps = gst_caps_from_string(stream.str().c_str());

    if (caps == NULL)
    {
        NVXIO_PRINT("Failed to create caps");
        FinalizeGStreamerPipeline();

        return false;
    }

    caps = gst_caps_fixate(caps);
#endif

    gst_app_src_set_caps(appsrc, caps);
    gst_caps_unref(caps);

    gst_bin_add(GST_BIN(pipeline), appsrcelem);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        NVXIO_PRINT("Cannot create " COLOR_ELEM " element");
        FinalizeGStreamerPipeline();

        return false;
    }
    gst_bin_add(GST_BIN(pipeline), color);

    // create videoflip element
    GstElement * videoflip = gst_element_factory_make("videoflip", NULL);
    if (videoflip == NULL)
    {
        NVXIO_PRINT("Cannot create videoflip element");
        FinalizeGStreamerPipeline();

        return false;
    }

    g_object_set(G_OBJECT(videoflip), "method", 5, NULL);

    gst_bin_add(GST_BIN(pipeline), videoflip);

    // create encodelem element
    GstElement * encodelem = gst_element_factory_make(ENCODE_ELEM, NULL);
    if (encodelem == NULL)
    {
        NVXIO_PRINT("Cannot create " ENCODE_ELEM " element");
        FinalizeGStreamerPipeline();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), encodelem);

    // create avimux element
    GstElement * avimux = gst_element_factory_make("avimux", NULL);
    if (avimux == NULL)
    {
        NVXIO_PRINT("Cannot create avimux element");
        FinalizeGStreamerPipeline();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), avimux);

    // create filesink element
    GstElement * filesink = gst_element_factory_make("filesink", NULL);
    if (filesink == NULL)
    {
        NVXIO_PRINT("Cannot create filesink element");
        FinalizeGStreamerPipeline();

        return false;
    }

    g_object_set(G_OBJECT(filesink), "location", windowTitle_.c_str(), NULL);
    g_object_set(G_OBJECT(filesink), "append", 0, NULL);

    gst_bin_add(GST_BIN(pipeline), filesink);


    // link elements
    if (!gst_element_link_many(appsrcelem, color, videoflip,
                               encodelem, avimux, filesink, NULL))
    {
        NVXIO_PRINT("GStreamer: cannot link appsrc -> " COLOR_ELEM
                    " -> videoflip -> " ENCODE_ELEM " -> avimux -> filesink");
        FinalizeGStreamerPipeline();

        return false;
    }

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    num_frames = 0;

    GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGStreamerPipeline();

        return false;
    }

    return true;
}