/* Map an unsized GL format/type pair onto the corresponding sized internal
 * format for @context.  Plain GLES2 (without GLES3) largely lacks sized
 * formats, so the unsized format is returned there instead.
 * Aborts on combinations this code does not know about. */
guint
gst_gl_sized_gl_format_from_gl_format_type (GstGLContext * context,
    guint format, guint type)
{
  gboolean have_texture_rg =
      gst_gl_context_check_feature (context, "GL_EXT_texture_rg");
  /* TRUE on a GLES2-only context (no GLES3 available) */
  gboolean gles2_only = USING_GLES2 (context) && !USING_GLES3 (context);

  if (format == GL_RGBA && type == GL_UNSIGNED_BYTE)
    return gles2_only ? GL_RGBA : GL_RGBA8;

  if (format == GL_RGB) {
    if (type == GL_UNSIGNED_BYTE)
      return GL_RGB8;
    if (type == GL_UNSIGNED_SHORT_5_6_5)
      return GL_RGB;
  }

  if (format == GL_RG && type == GL_UNSIGNED_BYTE)
    return (gles2_only && have_texture_rg) ? GL_RG : GL_RG8;

  if (format == GL_RED && type == GL_UNSIGNED_BYTE)
    return (gles2_only && have_texture_rg) ? GL_RED : GL_R8;

  /* the legacy luminance/alpha formats have no sized equivalents */
  if (format == GL_LUMINANCE || format == GL_LUMINANCE_ALPHA
      || format == GL_ALPHA)
    return format;

  g_assert_not_reached ();
  return 0;
}
/* Build the fragment program for converting GRAY8/GRAY16 input into the
 * RGB(A) layout described by convert->out_info.  Fills in
 * convert->priv->convert_info (texture counts, uniform names, shader
 * source). */
static void
_GRAY_to_RGB (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
  const gchar *out_format_str = gst_video_format_to_string (out_format);
  /* component ordering of the output format relative to "rgba" */
  gchar *pixel_order = _RGB_pixel_order ("rgba", out_format_str);
#if GST_GL_HAVE_PLATFORM_EAGL
  /* on iOS the RG texture path is disabled unconditionally */
  gboolean texture_rg = FALSE;
#else
  /* with RG textures the second byte of a 16-bit sample is read from the
   * green channel, otherwise from alpha (LUMINANCE_ALPHA upload) */
  gboolean texture_rg =
      gst_gl_context_check_feature (convert->context, "GL_EXT_texture_rg")
      || gst_gl_context_check_feature (convert->context, "GL_ARB_texture_rg");
#endif

  info->in_n_textures = 1;
  info->out_n_textures = 1;
  info->shader_tex_names[0] = "tex";

  switch (GST_VIDEO_INFO_FORMAT (&convert->in_info)) {
    case GST_VIDEO_FORMAT_GRAY8:
      /* the same source component feeds all three colour outputs
       * (NOTE(review): exact slot meaning depends on frag_REORDER —
       * confirm against its definition) */
      info->frag_prog = g_strdup_printf (frag_REORDER, "", pixel_order[0],
          pixel_order[0], pixel_order[0], pixel_order[3]);
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    {
      char val2 = texture_rg ? 'g' : 'a';
      /* LE vs BE below differ only in which of ('r', val2) comes first —
       * presumably the byte-combination order in frag_COMPOSE; verify */
      info->frag_prog = g_strdup_printf (frag_COMPOSE, val2, 'r',
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      break;
    }
    case GST_VIDEO_FORMAT_GRAY16_BE:
    {
      char val2 = texture_rg ? 'g' : 'a';
      info->frag_prog = g_strdup_printf (frag_COMPOSE, 'r', val2,
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      break;
    }
    default:
      /* unsupported input format: leave frag_prog unset */
      break;
  }

  g_free (pixel_order);
}
static gchar * _mangle_external_image_extension (const gchar * str, GstGLContext * context, GstGLTextureTarget from, GstGLTextureTarget to, GstGLSLVersion version, GstGLSLProfile profile) { GST_DEBUG ("is oes? %d, profile == ES? %d, version >= 300? %d, " "have essl3? %d", to == GST_GL_TEXTURE_TARGET_EXTERNAL_OES, profile == GST_GLSL_PROFILE_ES, version >= GST_GLSL_VERSION_300, gst_gl_context_check_feature (context, "GL_OES_EGL_image_external_essl3")); /* replace GL_OES_EGL_image_external with GL_OES_EGL_image_external_essl3 where supported */ if (to == GST_GL_TEXTURE_TARGET_EXTERNAL_OES && profile == GST_GLSL_PROFILE_ES && version >= GST_GLSL_VERSION_300) { if (gst_gl_context_check_feature (context, "GL_OES_EGL_image_external_essl3")) { GRegex *regex = g_regex_new ( /* '#extension ' with optional spacing */ "(#[ \\t]*extension[ \\t]+)" /* what we're looking to replace */ "GL_OES_EGL_image_external" /* ':' with optional spacing */ "([ \\t]*:[ \\t]*" /* some word like require, disable, etc followed by spacing and a newline */ "\\S+[ \\t]*\\R)", 0, 0, NULL); gchar *tmp = g_regex_replace (regex, str, -1, 0, "\\1GL_OES_EGL_image_external_essl3\\2", 0, NULL); g_regex_unref (regex); return tmp; } else { GST_FIXME ("Undefined situation detected. GLES3 supported but " "GL_OES_EGL_image_external_essl3 not supported. Falling back to the " "older GL_OES_EGL_image_external extension"); return g_strdup (str); } } else { return g_strdup (str); } }
static gchar * _mangle_texture_access (const gchar * str, GstGLContext * context, GstGLTextureTarget from, GstGLTextureTarget to, GstGLSLVersion version, GstGLSLProfile profile) { const gchar *from_str = NULL, *to_str = NULL; gchar *ret, *tmp; gchar *regex_find; GRegex *regex; if (from == GST_GL_TEXTURE_TARGET_2D) from_str = "texture2D"; if (from == GST_GL_TEXTURE_TARGET_RECTANGLE) from_str = "texture2DRect"; if (from == GST_GL_TEXTURE_TARGET_EXTERNAL_OES) from_str = "texture2D"; /* GL3 || gles3 but not when external-oes unless the image_external_essl3 extension is supported */ if (profile == GST_GLSL_PROFILE_CORE || (profile == GST_GLSL_PROFILE_ES && version >= GST_GLSL_VERSION_300 && (to != GST_GL_TEXTURE_TARGET_EXTERNAL_OES || gst_gl_context_check_feature (context, "GL_OES_EGL_image_external_essl3")))) { to_str = "texture"; } else { if (to == GST_GL_TEXTURE_TARGET_2D) to_str = "texture2D"; if (to == GST_GL_TEXTURE_TARGET_RECTANGLE) to_str = "texture2DRect"; if (to == GST_GL_TEXTURE_TARGET_EXTERNAL_OES) to_str = "texture2D"; } /* followed by any amount of whitespace then a bracket */ regex_find = g_strdup_printf ("%s(?=\\s*\\()", from_str); regex = g_regex_new (regex_find, 0, 0, NULL); tmp = g_regex_replace_literal (regex, str, -1, 0, to_str, 0, NULL); g_free (regex_find); g_regex_unref (regex); if (tmp) { ret = tmp; } else { GST_FIXME ("Couldn't mangle texture access successfully from %s to %s", from_str, to_str); ret = g_strdup (str); } return ret; }
static gboolean _gl_mem_create (GstGLMemoryEGL * gl_mem, GError ** error) { GstGLContext *context = gl_mem->mem.mem.context; GstGLContextEGL *ctx_egl = GST_GL_CONTEXT_EGL (context); const GstGLFuncs *gl = context->gl_vtable; GstGLBaseMemoryAllocatorClass *alloc_class; if (!gst_gl_context_check_feature (GST_GL_CONTEXT (context), "EGL_KHR_image_base")) { g_set_error (error, GST_GL_CONTEXT_ERROR, GST_GL_CONTEXT_ERROR_WRONG_API, "EGL_KHR_image_base is not supported"); return FALSE; } alloc_class = GST_GL_BASE_MEMORY_ALLOCATOR_CLASS (parent_class); if (!alloc_class->create ((GstGLBaseMemory *) gl_mem, error)) return FALSE; if (gl_mem->image == NULL) { EGLImageKHR image = ctx_egl->eglCreateImageKHR (ctx_egl->egl_display, ctx_egl->egl_context, EGL_GL_TEXTURE_2D_KHR, (EGLClientBuffer) (guintptr) gl_mem->mem.tex_id, NULL); GST_TRACE ("Generating EGLImage handle:%p from a texture:%u", gl_mem->image, gl_mem->mem.tex_id); if (eglGetError () != EGL_SUCCESS) { g_set_error (error, GST_GL_CONTEXT_ERROR, GST_GL_CONTEXT_ERROR_FAILED, "Failed to create EGLImage"); return FALSE; } gl_mem->image = gst_egl_image_new_wrapped (context, image, 0, 0, NULL, (GstEGLImageDestroyNotify) _destroy_egl_image); } else { gl->ActiveTexture (GL_TEXTURE0 + gl_mem->mem.plane); gl->BindTexture (GL_TEXTURE_2D, gl_mem->mem.tex_id); gl->EGLImageTargetTexture2D (GL_TEXTURE_2D, gst_egl_image_get_image (GST_EGL_IMAGE (gl_mem->image))); } return TRUE; }
/* Choose the GLSL version/profile pair to mangle shaders towards, based on
 * the context's GL API and version.  Outputs NONE/NONE when no suitable
 * combination exists. */
static void
_mangle_version_profile_from_gl_api (GstGLContext * context,
    GstGLTextureTarget from, GstGLTextureTarget to, GstGLSLVersion * version,
    GstGLSLProfile * profile)
{
  GstGLAPI api = gst_gl_context_get_gl_api (context);
  gint major, minor;

  gst_gl_context_get_gl_version (context, &major, &minor);

  *version = GST_GLSL_VERSION_NONE;
  *profile = GST_GLSL_PROFILE_NONE;

  if (api & GST_GL_API_OPENGL3) {
    if (major > 3 || minor >= 3) {
      /* desktop GL >= 3.3: core profile with matching GLSL */
      *version = GST_GLSL_VERSION_330;
      *profile = GST_GLSL_PROFILE_CORE;
    } else {
      /* GL 3.0-3.2: GLSL 150, profile left unset */
      *version = GST_GLSL_VERSION_150;
    }
  } else if (api & GST_GL_API_GLES2) {
    /* We don't know which texture function to use if we have GLES3 and
     * don't have the essl3 extension */
    if (major >= 3 && (to != GST_GL_TEXTURE_TARGET_EXTERNAL_OES
            || gst_gl_context_check_feature (context,
                "GL_OES_EGL_image_external_essl3"))) {
      *version = GST_GLSL_VERSION_300;
      *profile = GST_GLSL_PROFILE_ES;
    } else if (major >= 2) {
      *version = GST_GLSL_VERSION_100;
      *profile = GST_GLSL_PROFILE_ES;
    }
  } else if (api & GST_GL_API_OPENGL) {
    *version = GST_GLSL_VERSION_110;
    *profile = GST_GLSL_PROFILE_COMPATIBILITY;
  }
}
/**
 * gst_vaapi_plugin_base_set_gl_context:
 * @plugin: a #GstVaapiPluginBase
 * @object: the new GL context from downstream
 *
 * Registers the new GL context. The change is effective at the next
 * call to gst_vaapi_plugin_base_ensure_display(), where the
 * underlying display object could be re-allocated to fit the GL
 * context needs
 */
void
gst_vaapi_plugin_base_set_gl_context (GstVaapiPluginBase * plugin,
    GstObject * object)
{
#if USE_GST_GL_HELPERS
  GstGLContext *const gl_context = GST_GL_CONTEXT (object);
  GstVaapiDisplayType display_type;

  /* no-op when the same context is registered again */
  if (plugin->gl_context == object)
    return;
  gst_object_replace (&plugin->gl_context, object);

  /* derive the VAAPI display type from the GL platform */
  switch (gst_gl_context_get_gl_platform (gl_context)) {
#if USE_GLX
    case GST_GL_PLATFORM_GLX:
      display_type = GST_VAAPI_DISPLAY_TYPE_GLX;
      break;
#endif
    case GST_GL_PLATFORM_EGL:
#if VA_CHECK_VERSION (0,36,0) && USE_GST_GL_HELPERS
      /* dmabuf export on the src pad needs a non-GLES1 API plus the
       * EGL_EXT_image_dma_buf_import extension */
      plugin->srcpad_can_dmabuf =
          (!(gst_gl_context_get_gl_api (gl_context) & GST_GL_API_GLES1)
          && gst_gl_context_check_feature (gl_context,
              "EGL_EXT_image_dma_buf_import"));
#endif
#if USE_EGL
      display_type = GST_VAAPI_DISPLAY_TYPE_EGL;
      break;
#endif
      /* without USE_EGL the EGL case intentionally falls through to the
       * default: keep the currently configured display type */
    default:
      display_type = plugin->display_type;
      break;
  }
  GST_INFO_OBJECT (plugin, "GL context: %" GST_PTR_FORMAT, plugin->gl_context);
  gst_vaapi_plugin_base_set_display_type (plugin, display_type);
#endif
}
/**
 * gst_gl_texture_type_from_format:
 * @context: a #GstGLContext
 * @v_format: a #GstVideoFormat
 * @plane: the plane number (starting from 0)
 *
 * Returns: the #GstVideoGLTextureType for the specified @v_format and
 * @plane that can be allocated using @context
 */
GstVideoGLTextureType
gst_gl_texture_type_from_format (GstGLContext * context,
    GstVideoFormat v_format, guint plane)
{
  /* RG textures are available via extension or core GL(ES) >= 3.0 */
  gboolean have_rg =
      gst_gl_context_check_feature (context, "GL_EXT_texture_rg")
      || gst_gl_context_check_gl_version (context, GST_GL_API_GLES2, 3, 0)
      || gst_gl_context_check_feature (context, "GL_ARB_texture_rg")
      || gst_gl_context_check_gl_version (context, GST_GL_API_OPENGL3, 3, 0);
  guint components;

  /* first determine how many components the requested plane carries */
  switch (v_format) {
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_AYUV:
      components = 4;
      break;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      components = 3;
      break;
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
      /* packed 16-bit RGB has its own texture type */
      return GST_VIDEO_GL_TEXTURE_TYPE_RGB16;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      components = 2;
      break;
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      /* semi-planar: luma plane has 1 component, chroma plane has 2 */
      components = plane == 0 ? 1 : 2;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      components = 1;
      break;
    default:
      components = 4;
      g_assert_not_reached ();
      break;
  }

  /* then map the component count onto a texture type */
  if (components == 4)
    return GST_VIDEO_GL_TEXTURE_TYPE_RGBA;
  if (components == 3)
    return GST_VIDEO_GL_TEXTURE_TYPE_RGB;
  if (components == 2)
    return have_rg ? GST_VIDEO_GL_TEXTURE_TYPE_RG :
        GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA;
  if (components == 1)
    return have_rg ? GST_VIDEO_GL_TEXTURE_TYPE_R :
        GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;

  g_assert_not_reached ();
  return GST_VIDEO_GL_TEXTURE_TYPE_RGBA;
}
/* Attach EGLImage-backed memories (one per plane) plus video and
 * texture-upload metas to @buffer for the format described by @info.
 * For each plane a GL texture is generated, wrapped in an EGLImage and
 * then in a GstMemory.  Returns FALSE and cleans up on EGL failure.
 * (The long-disabled "#if 0" allocator paths were removed; one of them
 * read uninitialized size values.) */
gboolean
gst_egl_image_memory_setup_buffer (GstGLContext * ctx, GstVideoInfo * info,
    GstBuffer * buffer)
{
  gint i = 0;
  gint stride[3];
  gsize offset[3];
  GstMemory *mem[3] = { NULL, NULL, NULL };
  guint n_mem = 0;
  GstMemoryFlags flags = 0;
  EGLImageKHR image = EGL_NO_IMAGE_KHR;
  EGLClientBuffer client_buffer_tex[3] = { 0, 0, 0 };
  GstVideoGLTextureType texture_types[] = { 0, 0, 0, 0 };
  GstEGLImageAllocator *allocator = gst_egl_image_allocator_obtain ();
  GstGLContextEGL *context = GST_GL_CONTEXT_EGL (ctx);

  g_return_val_if_fail (ctx, FALSE);
  g_return_val_if_fail (info, FALSE);
  g_return_val_if_fail (buffer, FALSE);
  g_return_val_if_fail (gst_gl_context_check_feature (ctx,
          "EGL_KHR_image_base"), FALSE);

  memset (stride, 0, sizeof (stride));
  memset (offset, 0, sizeof (offset));

  /* EGLImage memory can neither be mapped nor shared */
  flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;
  flags |= GST_MEMORY_FLAG_NO_SHARE;

  switch (GST_VIDEO_INFO_FORMAT (info)) {
      /* single-plane RGB / packed formats */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_AYUV:
    {
      gsize size = 0;

      switch (GST_VIDEO_INFO_FORMAT (info)) {
        case GST_VIDEO_FORMAT_RGB:
        case GST_VIDEO_FORMAT_BGR:
        case GST_VIDEO_FORMAT_RGB16:
          texture_types[0] = GST_VIDEO_GL_TEXTURE_TYPE_RGB;
          break;
        case GST_VIDEO_FORMAT_RGBA:
        case GST_VIDEO_FORMAT_BGRA:
        case GST_VIDEO_FORMAT_ARGB:
        case GST_VIDEO_FORMAT_ABGR:
        case GST_VIDEO_FORMAT_RGBx:
        case GST_VIDEO_FORMAT_BGRx:
        case GST_VIDEO_FORMAT_xRGB:
        case GST_VIDEO_FORMAT_xBGR:
        case GST_VIDEO_FORMAT_AYUV:
          texture_types[0] = GST_VIDEO_GL_TEXTURE_TYPE_RGBA;
          break;
        default:
          g_assert_not_reached ();
          break;
      }

      gst_gl_generate_texture_full (GST_GL_CONTEXT (context), info, 0,
          stride, offset, &size, (GLuint *) & client_buffer_tex[0]);

      image = context->eglCreateImage (context->egl_display,
          context->egl_context, EGL_GL_TEXTURE_2D_KHR, client_buffer_tex[0],
          NULL);
      if (eglGetError () != EGL_SUCCESS)
        goto mem_error;

      mem[0] = gst_egl_image_allocator_wrap (allocator, context, image,
          texture_types[0], flags, size, client_buffer_tex[0],
          (GstEGLImageDestroyNotify) gst_egl_image_memory_del_gl_texture);
      n_mem = 1;
      break;
    }
      /* semi-planar: luma plane + interleaved chroma plane */
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
    {
      gsize size[2];

      texture_types[0] = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;
      texture_types[1] = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA;

      for (i = 0; i < 2; i++) {
        /* fix: generate per-plane (i); previously plane 0 was passed on
         * both iterations so the chroma plane inherited the luma plane's
         * stride/offset/size (compare the I420 loop below) */
        gst_gl_generate_texture_full (GST_GL_CONTEXT (context), info, i,
            stride, offset, size, (GLuint *) & client_buffer_tex[i]);

        image = context->eglCreateImage (context->egl_display,
            context->egl_context, EGL_GL_TEXTURE_2D_KHR,
            client_buffer_tex[i], NULL);
        if (eglGetError () != EGL_SUCCESS)
          goto mem_error;

        mem[i] = gst_egl_image_allocator_wrap (allocator, context, image,
            texture_types[i], flags, size[i], client_buffer_tex[i],
            (GstEGLImageDestroyNotify) gst_egl_image_memory_del_gl_texture);
      }
      n_mem = 2;
      break;
    }
      /* fully planar formats: one luminance texture per plane */
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:
    {
      gsize size[3];

      texture_types[0] = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;
      texture_types[1] = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;
      texture_types[2] = GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE;

      for (i = 0; i < 3; i++) {
        gst_gl_generate_texture_full (GST_GL_CONTEXT (context), info, i,
            stride, offset, size, (GLuint *) & client_buffer_tex[i]);

        image = context->eglCreateImage (context->egl_display,
            context->egl_context, EGL_GL_TEXTURE_2D_KHR,
            client_buffer_tex[i], NULL);
        if (eglGetError () != EGL_SUCCESS)
          goto mem_error;

        mem[i] = gst_egl_image_allocator_wrap (allocator, context, image,
            texture_types[i], flags, size[i], client_buffer_tex[i],
            (GstEGLImageDestroyNotify) gst_egl_image_memory_del_gl_texture);
      }
      n_mem = 3;
      break;
    }
    default:
      g_assert_not_reached ();
      break;
  }

  gst_buffer_add_video_meta_full (buffer, 0, GST_VIDEO_INFO_FORMAT (info),
      GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info),
      GST_VIDEO_INFO_N_PLANES (info), offset, stride);

  gst_buffer_add_video_gl_texture_upload_meta (buffer,
      gst_egl_image_memory_get_orientation (mem[0]), n_mem, texture_types,
      gst_eglimage_to_gl_texture_upload_meta, NULL, NULL, NULL);

  for (i = 0; i < (gint) n_mem; i++)
    gst_buffer_append_memory (buffer, mem[i]);

  return TRUE;

mem_error:
  {
    GST_CAT_ERROR (GST_CAT_DEFAULT, "Failed to create EGLImage");

    /* release any textures/memories created before the failure */
    for (i = 0; i < 3; i++) {
      if (client_buffer_tex[i])
        gst_gl_context_del_texture (ctx, (GLuint *) & client_buffer_tex[i]);
      if (mem[i])
        gst_memory_unref (mem[i]);
    }

    return FALSE;
  }
}
/* GstBaseSink::propose_allocation(): offer upstream a GL buffer pool
 * (reusing ours when the caps match), the video/texture-upload metas and
 * the GL (and, on EGL, EGLImage) memory allocators.
 * Fixes: the freshly created pool was leaked when setting its config
 * failed, and three '&params' tokens had been mangled into '¶ms'. */
static gboolean
gst_glimage_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
{
  GstGLImageSink *glimage_sink = GST_GLIMAGE_SINK (bsink);
  GstBufferPool *pool;
  GstStructure *config;
  GstCaps *caps;
  guint size;
  gboolean need_pool;
  GstStructure *gl_context;
  gchar *platform, *gl_apis;
  gpointer handle;
  GstAllocator *allocator = NULL;
  GstAllocationParams params;

  if (!_ensure_gl_setup (glimage_sink))
    return FALSE;

  gst_query_parse_allocation (query, &caps, &need_pool);

  if (caps == NULL)
    goto no_caps;

  if ((pool = glimage_sink->pool))
    gst_object_ref (pool);

  if (pool != NULL) {
    GstCaps *pcaps;

    /* we had a pool, check caps */
    GST_DEBUG_OBJECT (glimage_sink, "check existing pool caps");
    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_get_params (config, &pcaps, &size, NULL, NULL);

    if (!gst_caps_is_equal (caps, pcaps)) {
      GST_DEBUG_OBJECT (glimage_sink, "pool has different caps");
      /* different caps, we can't use this pool */
      gst_object_unref (pool);
      pool = NULL;
    }
    gst_structure_free (config);
  }

  if (pool == NULL && need_pool) {
    GstVideoInfo info;

    if (!gst_video_info_from_caps (&info, caps))
      goto invalid_caps;

    GST_DEBUG_OBJECT (glimage_sink, "create new pool");
    pool = gst_gl_buffer_pool_new (glimage_sink->context);

    /* the normal size of a frame */
    size = info.size;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, 0, 0);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* we need at least 2 buffer because we hold on to the last one */
  if (pool) {
    gst_query_add_allocation_pool (query, pool, size, 2, 0);
    gst_object_unref (pool);
  }

  /* we also support various metadata */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, 0);

  gl_apis =
      gst_gl_api_to_string (gst_gl_context_get_gl_api (glimage_sink->context));
  platform =
      gst_gl_platform_to_string (gst_gl_context_get_gl_platform
      (glimage_sink->context));
  handle = (gpointer) gst_gl_context_get_gl_context (glimage_sink->context);

  /* advertise our GL context to upstream texture-upload providers */
  gl_context =
      gst_structure_new ("GstVideoGLTextureUploadMeta", "gst.gl.GstGLContext",
      GST_GL_TYPE_CONTEXT, glimage_sink->context, "gst.gl.context.handle",
      G_TYPE_POINTER, handle, "gst.gl.context.type", G_TYPE_STRING, platform,
      "gst.gl.context.apis", G_TYPE_STRING, gl_apis, NULL);
  gst_query_add_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, gl_context);

  g_free (gl_apis);
  g_free (platform);
  gst_structure_free (gl_context);

  gst_allocation_params_init (&params);
  allocator = gst_allocator_find (GST_GL_MEMORY_ALLOCATOR);
  gst_query_add_allocation_param (query, allocator, &params);
  gst_object_unref (allocator);

#if GST_GL_HAVE_PLATFORM_EGL
  if (gst_gl_context_check_feature (glimage_sink->context,
          "EGL_KHR_image_base")) {
    allocator = gst_allocator_find (GST_EGL_IMAGE_MEMORY_TYPE);
    gst_query_add_allocation_param (query, allocator, &params);
    gst_object_unref (allocator);
  }
#endif

  return TRUE;

  /* ERRORS */
no_caps:
  {
    GST_DEBUG_OBJECT (bsink, "no caps specified");
    return FALSE;
  }
invalid_caps:
  {
    GST_DEBUG_OBJECT (bsink, "invalid caps specified");
    return FALSE;
  }
config_failed:
  {
    GST_DEBUG_OBJECT (bsink, "failed setting config");
    /* fix: don't leak the pool we just created */
    gst_object_unref (pool);
    return FALSE;
  }
}
/* Build the fragment program and colour-matrix coefficients for converting
 * a YUV input frame into the RGB(A) layout described by
 * convert->out_info.  Fills convert->priv->convert_info with the shader
 * source, texture counts, uniform names and BT.601/BT.709 matrices. */
static void
_YUV_to_RGB (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
  const gchar *out_format_str = gst_video_format_to_string (out_format);
  /* component ordering of the output format relative to "rgba" */
  gchar *pixel_order = _RGB_pixel_order ("rgba", out_format_str);
#if GST_GL_HAVE_PLATFORM_EAGL
  /* on iOS the RG texture path is disabled unconditionally */
  gboolean texture_rg = FALSE;
#else
  /* with RG textures the second component is sampled from green,
   * otherwise from alpha (LUMINANCE_ALPHA upload) */
  gboolean texture_rg =
      gst_gl_context_check_feature (convert->context, "GL_EXT_texture_rg")
      || gst_gl_context_check_feature (convert->context, "GL_ARB_texture_rg");
#endif

  info->out_n_textures = 1;

  switch (GST_VIDEO_INFO_FORMAT (&convert->in_info)) {
    case GST_VIDEO_FORMAT_AYUV:
      /* packed AYUV: single input texture */
      info->frag_prog = g_strdup_printf (frag_AYUV_to_RGB, pixel_order[0],
          pixel_order[1], pixel_order[2], pixel_order[3]);
      info->in_n_textures = 1;
      info->shader_tex_names[0] = "tex";
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:
      /* planar: three input textures in Y, U, V order */
      info->frag_prog = g_strdup_printf (frag_PLANAR_YUV_to_RGB,
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      info->in_n_textures = 3;
      info->shader_tex_names[0] = "Ytex";
      info->shader_tex_names[1] = "Utex";
      info->shader_tex_names[2] = "Vtex";
      break;
    case GST_VIDEO_FORMAT_YV12:
      /* same shader as I420 but the U and V planes are swapped */
      info->frag_prog = g_strdup_printf (frag_PLANAR_YUV_to_RGB,
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      info->in_n_textures = 3;
      info->shader_tex_names[0] = "Ytex";
      info->shader_tex_names[1] = "Vtex";
      info->shader_tex_names[2] = "Utex";
      break;
    case GST_VIDEO_FORMAT_YUY2:
    {
      char uv_val = texture_rg ? 'g' : 'a';
      /* NOTE(review): the five leading chars select sampling channels in
       * frag_YUY2_UYVY_to_RGB — confirm against the shader template */
      info->frag_prog = g_strdup_printf (frag_YUY2_UYVY_to_RGB, 'r', uv_val,
          uv_val, 'g', 'a', pixel_order[0], pixel_order[1], pixel_order[2],
          pixel_order[3]);
      info->in_n_textures = 1;
      info->shader_tex_names[0] = "Ytex";
      break;
    }
    case GST_VIDEO_FORMAT_NV12:
    {
      char val2 = texture_rg ? 'g' : 'a';
      info->frag_prog = g_strdup_printf (frag_NV12_NV21_to_RGB, 'r', val2,
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      info->in_n_textures = 2;
      info->shader_tex_names[0] = "Ytex";
      info->shader_tex_names[1] = "UVtex";
      break;
    }
    case GST_VIDEO_FORMAT_NV21:
    {
      char val2 = texture_rg ? 'g' : 'a';
      /* NV21 swaps the two chroma channel selectors relative to NV12 */
      info->frag_prog = g_strdup_printf (frag_NV12_NV21_to_RGB, val2, 'r',
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      info->in_n_textures = 2;
      info->shader_tex_names[0] = "Ytex";
      info->shader_tex_names[1] = "UVtex";
      break;
    }
    case GST_VIDEO_FORMAT_UYVY:
    {
      char y_val = texture_rg ? 'g' : 'a';
      info->frag_prog = g_strdup_printf (frag_YUY2_UYVY_to_RGB, y_val, 'g',
          'g', 'r', 'b', pixel_order[0], pixel_order[1], pixel_order[2],
          pixel_order[3]);
      info->in_n_textures = 1;
      info->shader_tex_names[0] = "Ytex";
      break;
    }
    default:
      /* unsupported input format: leave frag_prog unset */
      break;
  }

  /* select the YUV->RGB matrix from the input colorimetry */
  if (gst_video_colorimetry_matches (&convert->in_info.colorimetry,
          GST_VIDEO_COLORIMETRY_BT709)) {
    info->cms_offset = (gfloat *) from_yuv_bt709_offset;
    info->cms_coeff1 = (gfloat *) from_yuv_bt709_rcoeff;
    info->cms_coeff2 = (gfloat *) from_yuv_bt709_gcoeff;
    info->cms_coeff3 = (gfloat *) from_yuv_bt709_bcoeff;
  } else {
    /* defaults/bt601 */
    info->cms_offset = (gfloat *) from_yuv_bt601_offset;
    info->cms_coeff1 = (gfloat *) from_yuv_bt601_rcoeff;
    info->cms_coeff2 = (gfloat *) from_yuv_bt601_gcoeff;
    info->cms_coeff3 = (gfloat *) from_yuv_bt601_bcoeff;
  }

  g_free (pixel_order);
}