/* Take over ownership of the newly decoded buffer and make sure its
 * pixels end up in the GL texture, either directly (zero-copy GL memory)
 * or through a GstVideoGLTextureUploadMeta upload.
 * The very first buffer also triggers one-time model/texture setup. */
static void
update_image (APP_STATE_T * state, GstBuffer * buffer)
{
  GstVideoGLTextureUploadMeta *upload_meta;
  gboolean first_frame = (state->current_buffer == NULL);

  if (first_frame) {
    /* Setup the model world */
    init_model_proj (state);
    TRACE_VC_MEMORY ("after init_model_proj");

    /* initialize the OGLES texture(s) */
    init_textures (state, buffer);
    TRACE_VC_MEMORY ("after init_textures");
  } else {
    /* drop our reference to the previous frame before replacing it */
    gst_buffer_unref (state->current_buffer);
  }
  state->current_buffer = gst_buffer_ref (buffer);

  TRACE_VC_MEMORY_ONCE_FOR_ID ("before GstVideoGLTextureUploadMeta", gid0);
  if (state->can_avoid_upload) {
    /* the buffer already lives in GL memory: just adopt its texture id */
    GstMemory *mem = gst_buffer_peek_memory (state->current_buffer, 0);

    g_assert (gst_is_gl_memory (mem));
    state->tex = ((GstGLMemory *) mem)->tex_id;
  } else {
    upload_meta = gst_buffer_get_video_gl_texture_upload_meta (buffer);
    /* only single-texture (packed RGB/RGBA style) uploads are handled */
    if (upload_meta != NULL && upload_meta->n_textures == 1) {
      guint ids[4] = { state->tex, 0, 0, 0 };

      if (!gst_video_gl_texture_upload_meta_upload (upload_meta, ids))
        GST_WARNING ("failed to upload to texture");
    }
  }
  TRACE_VC_MEMORY_ONCE_FOR_ID ("after GstVideoGLTextureUploadMeta", gid1);
}
void MediaPlayerPrivateGStreamerBase::updateTexture(GstBuffer* buffer) { if (!m_texture) return; if (!client()) return; const void* srcData = 0; #ifdef GST_API_VERSION_1 GRefPtr<GstCaps> caps = currentVideoSinkCaps(); #else GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer); #endif if (!caps) return; IntSize size; GstVideoFormat format; int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride; if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) return; if (m_texture->size() != size) m_texture->reset(size); #if GST_CHECK_VERSION(1, 1, 0) GstVideoGLTextureUploadMeta* meta; if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) { if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture. const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(m_texture.get()); guint ids[4] = { textureGL->id(), 0, 0, 0 }; if (gst_video_gl_texture_upload_meta_upload(meta, ids)) { client()->setPlatformLayerNeedsDisplay(); return; } } } #endif #ifdef GST_API_VERSION_1 GstMapInfo srcInfo; gst_buffer_map(buffer, &srcInfo, GST_MAP_READ); srcData = srcInfo.data; #else srcData = GST_BUFFER_DATA(buffer); #endif m_texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData); #ifdef GST_API_VERSION_1 gst_buffer_unmap(buffer, &srcInfo); #endif client()->setPlatformLayerNeedsDisplay(); }
/*
 * Uploads using gst_video_gl_texture_upload_meta_upload().
 * i.e. consumer of GstVideoGLTextureUploadMeta
 *
 * Stores the success/failure of the upload in upload->priv->result.
 */
static void
_do_upload_with_meta (GstGLContext * context, GstGLUpload * upload)
{
  guint texture_ids[] = { upload->priv->tex_id, 0, 0, 0 };

  /* normalize the meta upload's return value into a plain TRUE/FALSE */
  upload->priv->result =
      gst_video_gl_texture_upload_meta_upload (upload->priv->meta,
      texture_ids) ? TRUE : FALSE;
}
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper) { WTF::GMutexLocker<GMutex> lock(m_sampleMutex); if (!GST_IS_SAMPLE(m_sample.get())) return nullptr; GstCaps* caps = gst_sample_get_caps(m_sample.get()); if (!caps) return nullptr; GstVideoInfo videoInfo; gst_video_info_init(&videoInfo); if (!gst_video_info_from_caps(&videoInfo, caps)) return nullptr; IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)); RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag); GstBuffer* buffer = gst_sample_get_buffer(m_sample.get()); #if GST_CHECK_VERSION(1, 1, 0) GstVideoGLTextureUploadMeta* meta; if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) { if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture. const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get()); guint ids[4] = { textureGL->id(), 0, 0, 0 }; if (gst_video_gl_texture_upload_meta_upload(meta, ids)) return texture; } } #endif // Right now the TextureMapper only supports chromas with one plane ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1); GstVideoFrame videoFrame; if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ)) return nullptr; int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0); const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0); texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData); gst_video_frame_unmap(&videoFrame); return texture; }
void MediaPlayerPrivateGStreamerBase::updateTexture(GstBuffer* buffer) { if (!m_texture) return; if (!client()) return; const void* srcData = 0; IntSize size = naturalSize(); if (m_texture->size() != size) m_texture->reset(size); #if GST_CHECK_VERSION(1, 1, 0) GstVideoGLTextureUploadMeta* meta; if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) { if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture. const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(m_texture.get()); guint ids[4] = { textureGL->id(), 0, 0, 0 }; if (gst_video_gl_texture_upload_meta_upload(meta, ids)) { client()->setPlatformLayerNeedsDisplay(); return; } } } #endif #ifdef GST_API_VERSION_1 GstMapInfo srcInfo; gst_buffer_map(buffer, &srcInfo, GST_MAP_READ); srcData = srcInfo.data; #else srcData = GST_BUFFER_DATA(buffer); #endif m_texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), size.width() * 4, BitmapTexture::UpdateCannotModifyOriginalImageData); #ifdef GST_API_VERSION_1 gst_buffer_unmap(buffer, &srcInfo); #endif client()->setPlatformLayerNeedsDisplay(); }
// Builds a BitmapTexture from the currently held m_buffer, either through
// the zero-copy GstVideoGLTextureUploadMeta path or via a CPU pixel copy.
// Returns 0 when there is no buffer, no caps, or the caps cannot be parsed.
//
// m_bufferMutex is taken with raw g_mutex_lock/g_mutex_unlock, so EVERY
// exit path below must unlock it manually — keep that invariant when
// editing. (A scoped locker would be safer; left as-is in this review.)
PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper)
{
    g_mutex_lock(m_bufferMutex);

    if (!m_buffer) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    const void* srcData = 0;
#ifdef GST_API_VERSION_1
    GRefPtr<GstCaps> caps = currentVideoSinkCaps();
#else
    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(m_buffer);
#endif
    if (!caps) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    IntSize size;
    GstVideoFormat format;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        g_mutex_unlock(m_bufferMutex);
        return 0;
    }

    RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size);

#if GST_CHECK_VERSION(1, 1, 0)
    // Zero-copy path: the producer uploads straight into our GL texture.
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get());
            guint ids[4] = { textureGL->id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids)) {
                g_mutex_unlock(m_bufferMutex);
                return texture;
            }
            // Upload failed: fall through to the CPU copy below.
        }
    }
#endif

    // CPU fallback: map the buffer (GStreamer 1.x) or read its data pointer
    // (0.10) and copy the pixels into the pooled texture.
#ifdef GST_API_VERSION_1
    GstMapInfo srcInfo;
    // NOTE(review): gst_buffer_map()'s return value is not checked here;
    // on failure srcInfo.data is undefined — worth confirming upstream.
    gst_buffer_map(m_buffer, &srcInfo, GST_MAP_READ);
    srcData = srcInfo.data;
#else
    srcData = GST_BUFFER_DATA(m_buffer);
#endif

    texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);

#ifdef GST_API_VERSION_1
    gst_buffer_unmap(m_buffer, &srcInfo);
#endif

    g_mutex_unlock(m_bufferMutex);
    return texture;
}