Example #1
gboolean owr_media_renderer_set_source(OwrMediaRenderer *renderer, OwrMediaSource *source)
{
    OwrMediaRendererPrivate *priv;
    gboolean ret = TRUE;
    GstPad *srcpad, *sinkpad;
    GstCaps *caps;
    GstPadLinkReturn pad_link_return;

    g_assert(renderer);
    g_assert(source);

    priv = renderer->priv;

    g_mutex_lock(&priv->media_renderer_lock);

    if (priv->source) {
        unlink_source(renderer);
        g_object_unref(priv->source);
        priv->source = NULL;
    }

    g_mutex_unlock(&priv->media_renderer_lock);
    /* FIXME - too much locking/unlocking of the same lock across private API? */

    sinkpad = _owr_media_renderer_get_pad(renderer);
    g_assert(sinkpad);
    caps = OWR_MEDIA_RENDERER_GET_CLASS(renderer)->get_caps(renderer);
    srcpad = _owr_media_source_get_pad(source, caps);
    gst_caps_unref(caps);
    g_assert(srcpad);

    g_mutex_lock(&priv->media_renderer_lock);

    pad_link_return = gst_pad_link(srcpad, sinkpad);
    if (pad_link_return != GST_PAD_LINK_OK) {
        GST_ERROR("Failed to link source with renderer (%d)", pad_link_return);
        ret = FALSE;
        goto done;
    }

    /* a new source was linked, so signal that the pipeline latency needs to be reconfigured */
    gst_element_post_message(_owr_get_pipeline(), gst_message_new_latency(GST_OBJECT(_owr_get_pipeline())));

    priv->source = g_object_ref(source);

done:
    priv->srcpad = srcpad;
    priv->sinkpad = sinkpad;
    g_mutex_unlock(&priv->media_renderer_lock);
    return ret;
}
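
A minimal caller sketch, not part of the listing above: one way owr_media_renderer_set_source() could be driven once capture sources have been discovered. The sources_ready() name and the assumption that it receives a GList of OwrMediaSource objects (as the owr_get_capture_sources() callback does) are for illustration only.

/* Hypothetical usage sketch: link the first discovered source into a renderer. */
static void sources_ready(GList *sources, gpointer user_data)
{
    OwrMediaRenderer *renderer = OWR_MEDIA_RENDERER(user_data);

    if (!sources)
        return;

    /* owr_media_renderer_set_source() takes its own reference on the source
     * and links the source's src pad to the renderer's sink pad, as above. */
    if (!owr_media_renderer_set_source(renderer, OWR_MEDIA_SOURCE(sources->data)))
        g_warning("Could not link capture source to renderer");
}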
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *
 * +--------+   +------------+   +-----+
 * | source +---+ capsfilter +---+ tee +---
 * +--------+   +------------+   +-----+
 *
 * Only one such chain is created per media source for the initial get_pad
 * call. Subsequent calls will just obtain another tee pad. After these initial
 * elements are created, they are linked together and synced up to the PLAYING
 * state.
 *
 * Once the initial chain is created, a block is placed on the new src pad of
 * the tee. The rest of the new chain (conversion elements, capsfilter, queues,
 * etc.) is created, linked and synced in the pad block callback.
 */
static GstPad *owr_local_media_source_get_pad(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_bin, *post_tee_bin;
    GstElement *source = NULL, *capsfilter = NULL, *tee;
    GstPad *ghostpad = NULL;
    gchar *pad_name;
    OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
    OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
    OwrCodecType codec_type = OWR_CODEC_TYPE_NONE;
    guint source_id;

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

    /* only create the source bin for this media source once */
    if (_owr_media_source_get_element(media_source)) {
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
        source_bin = _owr_media_source_get_element(media_source);
        tee = priv->source_tee;
    } else {
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstStructure *source_structure;
        GstElement *fakesink;

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_bin = gst_bin_new(bin_name);

        g_free(bin_name);
        bin_name = NULL;

        gst_bin_add(GST_BIN(_owr_get_pipeline()), source_bin);
        gst_element_sync_state_with_parent(GST_ELEMENT(source_bin));

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#ifdef __APPLE__
                g_object_set(source, "device", priv->device_index, NULL);
#endif
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
            /* workaround for osxaudiosrc bug
             * https://bugzilla.gnome.org/show_bug.cgi?id=711764 */
            CREATE_ELEMENT(capsfilter, "capsfilter", "audio-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_set(source_structure,
                "format", G_TYPE_STRING, "S32LE",
                "rate", G_TYPE_INT, 44100, NULL);
            gst_structure_remove_field(source_structure, "channels");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);
#endif

            break;
        }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                {
                    gchar *device = g_strdup_printf("/dev/video%u", priv->device_index);
                    g_object_set(source, "device", device, NULL);
                    g_free(device);
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "videotestsrc", "video-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_remove_field(source_structure, "format");
            gst_structure_remove_field(source_structure, "framerate");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        CREATE_ELEMENT(tee, "tee", "source-tee");

        CREATE_ELEMENT(fakesink, "fakesink", "source-tee-fakesink");
        g_object_set(fakesink, "async", FALSE, NULL);

        gst_bin_add_many(GST_BIN(source_bin), source, tee, fakesink, NULL);

        gst_element_sync_state_with_parent(fakesink);
        LINK_ELEMENTS(tee, fakesink);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");
    }

    codec_type = _owr_caps_to_codec_type(caps);
    source_id = g_atomic_int_add(&unique_pad_id, 1);

    pad_name = g_strdup_printf("src_%u_%u", codec_type, source_id);
    ghostpad = gst_ghost_pad_new_no_target(pad_name, GST_PAD_SRC);
    g_free(pad_name);

    post_tee_bin = create_post_tee_bin(media_source, source_bin, caps, ghostpad, source_id);
    if (!post_tee_bin) {
        gst_object_unref(ghostpad);
        ghostpad = NULL;
        goto done;
    }

    if (!gst_element_link(tee, post_tee_bin)) {
        GST_ERROR("Failed to link source tee to source-post-tee-bin-%u", source_id);
        g_object_unref(post_tee_bin);
        gst_object_unref(ghostpad);
        ghostpad = NULL;
        goto done;
    }

    if (!_owr_media_source_get_element(media_source)) {
        /* The next block inside this if is a workaround for avfvideosrc not
         * handling on-the-fly reconfiguration: upstream reconfigure events
         * are dropped in the probe. */
        if (media_type == OWR_MEDIA_TYPE_VIDEO) {
            GstPad *tee_sinkpad;

            tee_sinkpad = gst_element_get_static_pad(tee, "sink");
            gst_pad_add_probe(tee_sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                drop_reconfigure_cb, NULL, NULL);
        }

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            gst_element_sync_state_with_parent(tee);
            gst_element_sync_state_with_parent(capsfilter);
            LINK_ELEMENTS(source, capsfilter);
        } else {
            gst_element_sync_state_with_parent(tee);
            LINK_ELEMENTS(source, tee);
        }
        gst_element_sync_state_with_parent(source);
        _owr_media_source_set_element(media_source, source_bin);
        priv->source_tee = tee;
    }

done:
    return ghostpad;
}
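
The pad-block step described in the comment before owr_local_media_source_get_pad() (block the new tee src pad, then create and link the rest of the chain from the probe callback) happens inside create_post_tee_bin(), which is not part of this excerpt. A generic sketch of that GStreamer idiom, with hypothetical names (block_and_link_new_branch(), link_rest_of_chain_cb()) and simplified to linking a single downstream sink pad:

/* Generic pad-block sketch (hypothetical names, not the OpenWebRTC code):
 * block the freshly requested tee src pad, finish the downstream chain while
 * no data can flow, then remove the probe to unblock the pad. */
static GstPadProbeReturn link_rest_of_chain_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    GstPad *downstream_sinkpad = GST_PAD(user_data);

    /* Conversion elements, capsfilters, queues, etc. would be created, linked
     * and synced here, while the tee pad is still blocked. */
    if (gst_pad_link(pad, downstream_sinkpad) != GST_PAD_LINK_OK)
        GST_ERROR("Failed to link blocked tee pad to the new chain");

    return GST_PAD_PROBE_REMOVE; /* unblock: data now flows into the new branch */
}

static void block_and_link_new_branch(GstElement *tee, GstPad *downstream_sinkpad)
{
    /* the request pad reference is kept until the branch is torn down */
    GstPad *tee_srcpad = gst_element_get_request_pad(tee, "src_%u");

    gst_pad_add_probe(tee_srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
        link_rest_of_chain_cb, downstream_sinkpad, NULL);
}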
Example #3
static GstElement *owr_video_renderer_get_element(OwrMediaRenderer *renderer)
{
    OwrVideoRenderer *video_renderer;
    OwrVideoRendererPrivate *priv;
    GstElement *videorate, *videoscale, *videoconvert, *capsfilter, *balance, *queue, *sink;
    GstCaps *filter_caps;
    GstPad *ghostpad, *sinkpad;
    gint fps_n = 0, fps_d = 1;
    gchar *bin_name;

    g_assert(renderer);
    video_renderer = OWR_VIDEO_RENDERER(renderer);
    priv = video_renderer->priv;

    g_mutex_lock(&priv->video_renderer_lock);

    if (priv->renderer_bin)
        goto done;

    bin_name = g_strdup_printf("video-renderer-bin-%u", g_atomic_int_add(&unique_bin_id, 1));
    priv->renderer_bin = gst_bin_new(bin_name);
    g_free(bin_name);

    gst_bin_add(GST_BIN(_owr_get_pipeline()), priv->renderer_bin);
    gst_element_sync_state_with_parent(GST_ELEMENT(priv->renderer_bin));

    videorate = gst_element_factory_make("videorate", "video-renderer-rate");
    g_object_set(videorate, "drop-only", TRUE, NULL);

    videoscale = gst_element_factory_make("videoscale", "video-renderer-scale");
    videoconvert = gst_element_factory_make(VIDEO_CONVERT, "video-renderer-convert");

    gst_util_double_to_fraction(priv->max_framerate, &fps_n, &fps_d);

    capsfilter = gst_element_factory_make("capsfilter", "video-renderer-capsfilter");
    filter_caps = gst_caps_new_empty_simple("video/x-raw");
    if (priv->width > 0)
        gst_caps_set_simple(filter_caps, "width", G_TYPE_INT, priv->width, NULL);
    if (priv->height > 0)
        gst_caps_set_simple(filter_caps, "height", G_TYPE_INT, priv->height, NULL);
    if (fps_n > 0 && fps_d > 0)
        gst_caps_set_simple(filter_caps, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
    g_object_set(capsfilter, "caps", filter_caps, NULL);

    balance = gst_element_factory_make("videobalance", "video-renderer-balance");
    g_signal_connect_object(renderer, "notify::disabled", G_CALLBACK(renderer_disabled),
        balance, 0);
    renderer_disabled(renderer, NULL, balance);

    queue = gst_element_factory_make("queue", "video-renderer-queue");
    g_assert(queue);
    g_object_set(queue, "max-size-buffers", 3, "max-size-bytes", 0, "max-size-time", 0, NULL);

    sink = gst_element_factory_make(VIDEO_SINK, "video-renderer-sink");
    g_assert(sink);
    if (GST_IS_VIDEO_OVERLAY(sink))
        gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), priv->window_handle);

    /* async=FALSE is needed with live sources: prerolling is not possible
     * from live sources in GStreamer, so the sink must not wait for preroll */
    g_object_set(sink, "async", FALSE, NULL);

    gst_bin_add_many(GST_BIN(priv->renderer_bin), videorate, videoscale,
        videoconvert, capsfilter, balance, queue, sink, NULL);

    LINK_ELEMENTS(queue, sink);
    LINK_ELEMENTS(balance, queue);
    LINK_ELEMENTS(capsfilter, balance);
    LINK_ELEMENTS(videoconvert, capsfilter);
    LINK_ELEMENTS(videoscale, videoconvert);
    LINK_ELEMENTS(videorate, videoscale);

    sinkpad = gst_element_get_static_pad(videorate, "sink");
    g_assert(sinkpad);
    ghostpad = gst_ghost_pad_new("sink", sinkpad);
    gst_pad_set_active(ghostpad, TRUE);
    gst_element_add_pad(priv->renderer_bin, ghostpad);
    gst_object_unref(sinkpad);

    gst_element_sync_state_with_parent(sink);
    gst_element_sync_state_with_parent(queue);
    gst_element_sync_state_with_parent(balance);
    gst_element_sync_state_with_parent(capsfilter);
    gst_element_sync_state_with_parent(videoconvert);
    gst_element_sync_state_with_parent(videoscale);
    gst_element_sync_state_with_parent(videorate);
done:
    g_mutex_unlock(&priv->video_renderer_lock);
    return priv->renderer_bin;
}
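
The renderer_disabled() callback connected above is not included in this excerpt. A plausible sketch of its behaviour, assuming the "disabled" property is meant to black out the picture through the videobalance element (the property values below are an assumption, not taken from the OpenWebRTC sources):

/* Hypothetical sketch of renderer_disabled(): black out the picture via the
 * videobalance element when the renderer is disabled, instead of tearing
 * down the rendering chain. */
static void renderer_disabled(OwrMediaRenderer *renderer, GParamSpec *pspec, gpointer user_data)
{
    GstElement *balance = GST_ELEMENT(user_data);
    gboolean disabled = FALSE;

    g_object_get(renderer, "disabled", &disabled, NULL);

    /* saturation 0.0 plus brightness -1.0 yields solid black frames */
    g_object_set(balance,
        "saturation", disabled ? 0.0 : 1.0,
        "brightness", disabled ? -1.0 : 0.0,
        NULL);
}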