Code example #1
static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    GST_DEBUG_OBJECT(sink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);

#ifdef GST_API_VERSION_1
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps)) {
        GST_ERROR_OBJECT(sink, "Invalid caps %" GST_PTR_FORMAT, caps);
        return FALSE;
    }
#endif

    gst_caps_replace(&priv->currentCaps, caps);
    return TRUE;
}
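Note that gst_caps_replace() takes its own reference on the new caps, so the stored priv->currentCaps must be released again when the sink shuts down. A minimal sketch of such a cleanup hook, assuming a stop vfunc along these lines (the function name here is illustrative, not taken from the project):

/* Sketch only: release the caps reference taken in webkitVideoSinkSetCaps(). */
static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    /* gst_caps_replace() with NULL unrefs the old caps and clears the field. */
    gst_caps_replace(&priv->currentCaps, NULL);
    return TRUE;
}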
Code example #2
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
{
    GstCaps* caps;
    gst_query_parse_allocation(query, &caps, 0);
    if (!caps)
        return FALSE;

    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    if (!gst_video_info_from_caps(&sink->priv->info, caps))
        return FALSE;

    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
    gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
#if GST_CHECK_VERSION(1, 1, 0)
    gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
#endif
    return TRUE;
}
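For context, these vfuncs are not called directly; they are plugged into the GstBaseSink class vtable in class_init. A rough sketch of that wiring, assuming the vfunc names used in these examples (GObject property and signal setup omitted):

/* Sketch only: hook the vfuncs shown above into GstBaseSinkClass (GStreamer 1.x). */
static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
{
    GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);

    baseSinkClass->set_caps = webkitVideoSinkSetCaps;
    baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation; /* 1.0 API only */
    baseSinkClass->render = webkitVideoSinkRender;
    /* unlock/stop hooks and the property/signal registration are left out here. */
}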
Code example #3
File: VideoSinkGStreamer.c  Project: ceyusa/gst-wk
static void webkitVideoSinkGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* parameterSpec)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(object);
    WebKitVideoSinkPrivate* priv = sink->priv;

    switch (propertyId) {
    case PROP_CAPS: {
        GstCaps* caps = priv->currentCaps;
        if (caps)
            gst_caps_ref(caps);
        g_value_take_boxed(value, caps);
        break;
    }
    case PROP_SILENT:
        g_value_set_boolean(value, priv->silent);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, parameterSpec);
    }
}
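The PROP_CAPS and PROP_SILENT cases imply matching g_object_class_install_property() calls in class_init. A hedged sketch of that registration; the property nicks, blurbs and defaults below are assumptions, not the project's actual strings:

/* Sketch only: property registration implied by the getter above. */
static void webkit_video_sink_install_properties(GObjectClass* gobjectClass)
{
    g_object_class_install_property(gobjectClass, PROP_CAPS,
        g_param_spec_boxed("current-caps", "Current caps",
            "The currently negotiated caps", GST_TYPE_CAPS,
            G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

    g_object_class_install_property(gobjectClass, PROP_SILENT,
        g_param_spec_boolean("silent", "Silent",
            "Do not print per-buffer metadata", FALSE,
            G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}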
Code example #4
/**
 * webkit_video_sink_new:
 *
 * Creates a new GStreamer video sink.
 *
 * Return value: a #GstElement for the newly created video sink
 */
GstElement* webkit_video_sink_new(WebCore::GStreamerGWorld* gstGWorld)
{
    GstElement* element = GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));
    WEBKIT_VIDEO_SINK(element)->priv->gstGWorld = gstGWorld;
    return element;
}
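A short usage sketch for webkit_video_sink_new(), assuming a GStreamerGWorld instance (gstGWorld) is already available and that the sink is handed to a playbin-style pipeline through its "video-sink" property ("playbin2" would be the factory name on GStreamer 0.10):

/* Usage sketch: plug the WebKit sink into a playbin pipeline. */
GstElement* playBin = gst_element_factory_make("playbin", NULL); /* "playbin2" on 0.10 */
GstElement* videoSink = webkit_video_sink_new(gstGWorld);        /* gstGWorld assumed to exist */

g_object_set(playBin, "video-sink", videoSink, NULL);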
Code example #5
static GstFlowReturn
webkit_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(bsink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->buffer_mutex);

    if (priv->unlocked) {
        g_mutex_unlock(priv->buffer_mutex);
        return GST_FLOW_OK;
    }

    // Ignore buffers if the video is already in fullscreen using
    // another sink.
    if (priv->gstGWorld->isFullscreen()) {
        g_mutex_unlock(priv->buffer_mutex);
        return GST_FLOW_OK;
    }

    priv->buffer = gst_buffer_ref(buffer);

    // For the unlikely case where the buffer has no caps, the caps
    // are implicitly the caps of the pad. This shouldn't happen.
    if (G_UNLIKELY(!GST_BUFFER_CAPS(buffer))) {
        buffer = priv->buffer = gst_buffer_make_metadata_writable(priv->buffer);
        gst_buffer_set_caps(priv->buffer, GST_PAD_CAPS(GST_BASE_SINK_PAD(bsink)));
    }

    GstCaps *caps = GST_BUFFER_CAPS(buffer);
    GstVideoFormat format;
    int width, height;
    if (G_UNLIKELY(!gst_video_format_parse_caps(caps, &format, &width, &height))) {
        gst_buffer_unref(buffer);
        g_mutex_unlock(priv->buffer_mutex);
        return GST_FLOW_ERROR;
    }

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also,
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).
        GstBuffer *newBuffer = gst_buffer_try_new_and_alloc(GST_BUFFER_SIZE(buffer));

        // Check if allocation failed
        if (G_UNLIKELY(!newBuffer)) {
            gst_buffer_unref(buffer);
            g_mutex_unlock(priv->buffer_mutex);
            return GST_FLOW_ERROR;
        }

        gst_buffer_copy_metadata(newBuffer, buffer, (GstBufferCopyFlags) GST_BUFFER_COPY_ALL);

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
        unsigned short alpha;
        const guint8 *source = GST_BUFFER_DATA(buffer);
        guint8 *destination = GST_BUFFER_DATA(newBuffer);

        for (int x = 0; x < height; x++) {
            for (int y = 0; y < width; y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeout_id = g_timeout_add_full(G_PRIORITY_DEFAULT, 0,
                                          webkit_video_sink_timeout_func,
                                          gst_object_ref(sink),
                                          (GDestroyNotify)gst_object_unref);

    g_cond_wait(priv->data_cond, priv->buffer_mutex);
    g_mutex_unlock(priv->buffer_mutex);
    return GST_FLOW_OK;
}
Code example #6
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->bufferMutex);

    if (priv->unlocked) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Ignore buffers if the video is already in fullscreen using
    // another sink.
    if (priv->gstGWorld->isFullscreen()) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }
#endif

    priv->buffer = gst_buffer_ref(buffer);

#ifndef GST_API_VERSION_1
    // For the unlikely case where the buffer has no caps, the caps
    // are implicitly the caps of the pad. This shouldn't happen.
    if (UNLIKELY(!GST_BUFFER_CAPS(buffer))) {
        buffer = priv->buffer = gst_buffer_make_metadata_writable(priv->buffer);
        gst_buffer_set_caps(priv->buffer, GST_PAD_CAPS(GST_BASE_SINK_PAD(baseSink)));
    }

    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer);
#else
    GRefPtr<GstCaps> caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = adoptGRef(gst_video_info_to_caps(&priv->info));
    else
        caps = priv->currentCaps;
#endif

    GstVideoFormat format;
    WebCore::IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        gst_buffer_unref(buffer);
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also,
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).

        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer)) {
            g_mutex_unlock(priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
#ifndef GST_API_VERSION_1
        const guint8* source = GST_BUFFER_DATA(buffer);
        guint8* destination = GST_BUFFER_DATA(newBuffer);
#else
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = const_cast<guint8*>(sourceInfo.data);
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = static_cast<guint8*>(destinationInfo.data);
#endif

        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

#ifdef GST_API_VERSION_1
        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
#endif
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
                                          gst_object_ref(sink), reinterpret_cast<GDestroyNotify>(gst_object_unref));

    g_cond_wait(priv->dataCondition, priv->bufferMutex);
    g_mutex_unlock(priv->bufferMutex);
    return GST_FLOW_OK;
}
Code example #7
File: VideoSinkGStreamer.c  Project: ceyusa/gst-wk
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(&priv->bufferMutex);

    if (priv->unlocked) {
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_OK;
    }

    priv->buffer = gst_buffer_ref(buffer);

    GstCaps* caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = gst_video_info_to_caps(&priv->info);
    else
        caps = priv->currentCaps;

    GstVideoFormat format;
    IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps, &size, &format, &pixelAspectRatioNumerator, &pixelAspectRatioDenominator, &stride)) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    gst_caps_unref(caps);

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also,
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).

        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (G_UNLIKELY(!newBuffer)) {
            g_mutex_unlock(&priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = sourceInfo.data;
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = destinationInfo.data;

        for (int x = 0; x < size.height; x++) {
            for (int y = 0; y < size.width; y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
                                         gst_object_ref(sink), (GDestroyNotify) gst_object_unref);
    g_source_set_name_by_id(priv->timeoutId, "[WebKit] webkitVideoSinkTimeoutCallback");

    if (!priv->silent)
        print_buffer_metadata(sink, buffer);

    g_cond_wait(&priv->dataCondition, &priv->bufferMutex);
    g_mutex_unlock(&priv->bufferMutex);
    return GST_FLOW_OK;
}
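render() parks the streaming thread on priv->dataCondition until the timeout source scheduled above has run on the main loop. A hedged sketch of what the paired webkitVideoSinkTimeoutCallback might look like; the repaint-requested signal and the webkitVideoSinkSignals array are assumptions based on the surrounding code, not a verbatim copy:

/* Sketch only: main-thread callback that consumes priv->buffer, notifies the
 * painting code, and wakes the streaming thread blocked in render(). */
static gboolean webkitVideoSinkTimeoutCallback(gpointer data)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(data);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(&priv->bufferMutex);
    GstBuffer* buffer = priv->buffer;
    priv->buffer = NULL;
    priv->timeoutId = 0;

    if (!buffer || priv->unlocked) {
        if (buffer)
            gst_buffer_unref(buffer);
        g_cond_signal(&priv->dataCondition);
        g_mutex_unlock(&priv->bufferMutex);
        return FALSE;
    }

    /* Assumed signal: hand the frame to whoever repaints the video element. */
    g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, buffer);
    gst_buffer_unref(buffer);

    g_cond_signal(&priv->dataCondition);
    g_mutex_unlock(&priv->bufferMutex);
    return FALSE; /* one-shot source */
}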
Code example #8
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    GstCaps* caps = NULL;
    gboolean need_pool;

    gst_query_parse_allocation(query, &caps, &need_pool);
    if (!caps)
        return FALSE;

    if (!gst_video_info_from_caps(&sink->priv->info, caps))
        return FALSE;

#if USE(OPENGL_ES_2) && GST_CHECK_VERSION(1, 3, 0)
    // Code adapted from gst-plugins-bad's glimagesink.

    GstBufferPool* pool;
    GstStructure* config;
    guint size;
    GstAllocator* allocator = 0;
    GstAllocationParams params;

    if (!_ensure_gl_setup(sink))
        return FALSE;

    if ((pool = sink->priv->pool))
        gst_object_ref(pool);

    if (pool) {
        GstCaps* pcaps;

        // We had a pool, check its caps.
        GST_DEBUG_OBJECT (sink, "check existing pool caps");
        config = gst_buffer_pool_get_config(pool);
        gst_buffer_pool_config_get_params(config, &pcaps, &size, 0, 0);

        if (!gst_caps_is_equal(caps, pcaps)) {
            GST_DEBUG_OBJECT(sink, "pool has different caps");
            // Different caps, we can't use this pool.
            gst_object_unref(pool);
            pool = 0;
        }
        gst_structure_free(config);
    }

    if (need_pool && !pool) {
        GstVideoInfo info;

        if (!gst_video_info_from_caps(&info, caps)) {
            GST_DEBUG_OBJECT(sink, "invalid caps specified");
            return FALSE;
        }

        GST_DEBUG_OBJECT(sink, "create new pool");
        pool = gst_gl_buffer_pool_new(sink->priv->context);

        // The normal size of a frame.
        size = info.size;

        config = gst_buffer_pool_get_config(pool);
        gst_buffer_pool_config_set_params(config, caps, size, 0, 0);
        if (!gst_buffer_pool_set_config(pool, config)) {
            GST_DEBUG_OBJECT(sink, "failed setting config");
            return FALSE;
        }
    }

    // [WiP] Let's require 8 buffers for now. The player holds to the last 3
    // ones and the sink holds only the last one so in theory 5 should
    // be enough.
    if (pool) {
        gst_query_add_allocation_pool(query, pool, size, 8, 0);
        gst_object_unref(pool);
    }

    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);

    gst_allocation_params_init(&params);
    allocator = gst_allocator_find(GST_EGL_IMAGE_MEMORY_TYPE);
    gst_query_add_allocation_param(query, allocator, &params);
    gst_object_unref(allocator);
#else
    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
    gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
    gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
#endif
    return TRUE;
}