// Returns the size of the video IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const { if (!hasVideo()) return IntSize(); if (!m_videoSize.isEmpty()) return m_videoSize; GRefPtr<GstCaps> caps = currentVideoSinkCaps(); if (!caps) return IntSize(); // TODO: handle possible clean aperture data. See // https://bugzilla.gnome.org/show_bug.cgi?id=596571 // TODO: handle possible transformation matrix. See // https://bugzilla.gnome.org/show_bug.cgi?id=596326 // Get the video PAR and original size, if this fails the // video-sink has likely not yet negotiated its caps. int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride; IntSize originalSize; GstVideoFormat format; if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) return IntSize(); LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height()); LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator); // Calculate DAR based on PAR and video size. int displayWidth = originalSize.width() * pixelAspectRatioNumerator; int displayHeight = originalSize.height() * pixelAspectRatioDenominator; // Divide display width and height by their GCD to avoid possible overflows. int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight); displayWidth /= displayAspectRatioGCD; displayHeight /= displayAspectRatioGCD; // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function. 
guint64 width = 0, height = 0; if (!(originalSize.height() % displayHeight)) { LOG_MEDIA_MESSAGE("Keeping video original height"); width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight); height = static_cast<guint64>(originalSize.height()); } else if (!(originalSize.width() % displayWidth)) { LOG_MEDIA_MESSAGE("Keeping video original width"); height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth); width = static_cast<guint64>(originalSize.width()); } else { LOG_MEDIA_MESSAGE("Approximating while keeping original video height"); width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight); height = static_cast<guint64>(originalSize.height()); } LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height); m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height)); return m_videoSize; }
// Uploads the pixels of the given GStreamer buffer into the player's
// persistent texture (m_texture) and asks the platform-layer client to
// redisplay. No-op when there is no texture or no client, or when the
// caps cannot be read. On the GL-upload-meta fast path the copy happens
// on the GPU and the CPU copy below is skipped entirely.
void MediaPlayerPrivateGStreamerBase::updateTexture(GstBuffer* buffer)
{
    if (!m_texture)
        return;

    if (!client())
        return;

    const void* srcData = 0;
#ifdef GST_API_VERSION_1
    // GStreamer 1.0: buffers no longer carry caps; query the sink instead.
    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
#else
    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer);
#endif
    if (!caps)
        return;

    IntSize size;
    GstVideoFormat format;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return;

    // Resize the texture when the frame size changed.
    if (m_texture->size() != size)
        m_texture->reset(size);

#if GST_CHECK_VERSION(1, 1, 0)
    // Fast path: if the buffer carries a GL texture upload meta, let
    // GStreamer upload directly into our texture id and return early.
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(m_texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids)) {
                client()->setPlatformLayerNeedsDisplay();
                return;
            }
        }
    }
#endif

    // Slow path: map the buffer for reading and copy the pixels.
#ifdef GST_API_VERSION_1
    GstMapInfo srcInfo;
    gst_buffer_map(buffer, &srcInfo, GST_MAP_READ);
    srcData = srcInfo.data;
#else
    srcData = GST_BUFFER_DATA(buffer);
#endif

    m_texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);

#ifdef GST_API_VERSION_1
    gst_buffer_unmap(buffer, &srcInfo);
#endif

    client()->setPlatformLayerNeedsDisplay();
}
// Copies the most recent frame (m_buffer) into a texture acquired from the
// TextureMapper's pool and returns it. Returns 0 (null) when there is no
// pending buffer, no caps, or the caps cannot be parsed.
// Thread-safety: m_bufferMutex guards m_buffer; note every exit path below
// unlocks it before returning.
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    g_mutex_lock(m_bufferMutex);

    if (!m_buffer) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    const void* srcData = 0;
#ifdef GST_API_VERSION_1
    // GStreamer 1.0: buffers no longer carry caps; query the sink instead.
    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
#else
    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(m_buffer);
#endif
    if (!caps) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    IntSize size;
    GstVideoFormat format;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size);

#if GST_CHECK_VERSION(1, 1, 0)
    // Fast path: if the buffer carries a GL texture upload meta, let
    // GStreamer upload straight into the pooled texture and return it.
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids)) {
                g_mutex_unlock(m_bufferMutex);
                return texture;
            }
        }
    }
#endif

    // Slow path: map the buffer for reading and copy the pixels.
#ifdef GST_API_VERSION_1
    GstMapInfo srcInfo;
    gst_buffer_map(m_buffer, &srcInfo, GST_MAP_READ);
    srcData = srcInfo.data;
#else
    srcData = GST_BUFFER_DATA(m_buffer);
#endif

    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);

#ifdef GST_API_VERSION_1
    gst_buffer_unmap(m_buffer, &srcInfo);
#endif

    g_mutex_unlock(m_bufferMutex);
    return texture;
}
// GstBaseSink render vfunc (dual GStreamer 0.10/1.0 build): keeps a reference
// to the incoming buffer in priv->buffer, premultiplies alpha for ARGB/BGRA
// formats into a replacement buffer, then schedules the main-thread timeout
// callback and blocks on priv->dataCondition until the frame is consumed.
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->bufferMutex);

    if (priv->unlocked) {
        // Sink was unlocked (flush/stop): drop the frame without error.
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Ignore buffers if the video is already in fullscreen using
    // another sink.
    if (priv->gstGWorld->isFullscreen()) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }
#endif

    priv->buffer = gst_buffer_ref(buffer);

#ifndef GST_API_VERSION_1
    // For the unlikely case where the buffer has no caps, the caps
    // are implicitly the caps of the pad. This shouldn't happen.
    if (UNLIKELY(!GST_BUFFER_CAPS(buffer))) {
        buffer = priv->buffer = gst_buffer_make_metadata_writable(priv->buffer);
        gst_buffer_set_caps(priv->buffer, GST_PAD_CAPS(GST_BASE_SINK_PAD(baseSink)));
    }

    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer);
#else
    GRefPtr<GstCaps> caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = adoptGRef(gst_video_info_to_caps(&priv->info));
    else
        caps = priv->currentCaps;
#endif

    GstVideoFormat format;
    WebCore::IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        gst_buffer_unref(buffer);
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).
        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer)) {
            g_mutex_unlock(priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
#ifndef GST_API_VERSION_1
        const guint8* source = GST_BUFFER_DATA(buffer);
        guint8* destination = GST_BUFFER_DATA(newBuffer);
#else
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = const_cast<guint8*>(sourceInfo.data);
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = static_cast<guint8*>(destinationInfo.data);
#endif

        // Note: x iterates rows (height), y iterates columns (width).
        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                // BGRA byte order: alpha is the last byte; +128 rounds to nearest.
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                // ARGB byte order: alpha is the first byte.
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

#ifdef GST_API_VERSION_1
        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
#endif
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback, gst_object_ref(sink), reinterpret_cast<GDestroyNotify>(gst_object_unref));

    // Block until the main thread signals that the frame has been handled.
    g_cond_wait(priv->dataCondition, priv->bufferMutex);
    g_mutex_unlock(priv->bufferMutex);
    return GST_FLOW_OK;
}
// GstBaseSink render vfunc (GStreamer 1.0 build): keeps a reference to the
// incoming buffer in priv->buffer, premultiplies alpha for ARGB/BGRA formats
// into a replacement buffer, then schedules the main-thread timeout callback
// and blocks on priv->dataCondition until the frame has been consumed.
//
// Fixes:
// - size.Height / size.Width were not valid IntSize members (everywhere else
//   in this file the accessors size.height() / size.width() are used).
// - caps refcounting: gst_video_info_to_caps() returns a new reference but
//   priv->currentCaps is only borrowed, yet both branches fed the same
//   unconditional gst_caps_unref(); take our own ref in the borrow branch so
//   the unref is balanced either way.
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(&priv->bufferMutex);

    if (priv->unlocked) {
        // Sink was unlocked (flush/stop): drop the frame without error.
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_OK;
    }

    priv->buffer = gst_buffer_ref(buffer);

    GstCaps* caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = gst_video_info_to_caps(&priv->info); // New reference.
    else {
        // priv->currentCaps is a borrowed reference; take our own so the
        // unconditional gst_caps_unref() below is balanced in both branches.
        caps = gst_caps_ref(priv->currentCaps);
    }

    GstVideoFormat format;
    IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps, &size, &format, &pixelAspectRatioNumerator, &pixelAspectRatioDenominator, &stride)) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_ERROR;
    }
    gst_caps_unref(caps);

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also
        // the buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).
        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (G_UNLIKELY(!newBuffer)) {
            g_mutex_unlock(&priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = sourceInfo.data;
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = destinationInfo.data;

        // Note: x iterates rows (height), y iterates columns (width).
        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                // BGRA byte order: alpha is the last byte; +128 rounds to nearest.
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                // ARGB byte order: alpha is the first byte.
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback, gst_object_ref(sink), (GDestroyNotify) gst_object_unref);
    g_source_set_name_by_id(priv->timeoutId, "[WebKit] webkitVideoSinkTimeoutCallback");

    if (!priv->silent)
        print_buffer_metadata(sink, buffer);

    // Block until the main thread signals that the frame has been handled.
    g_cond_wait(&priv->dataCondition, &priv->bufferMutex);
    g_mutex_unlock(&priv->bufferMutex);
    return GST_FLOW_OK;
}