/* Configure the RGB->RGB conversion: builds a fragment program that swizzles
 * the input channels into the output channel order.  Uses a single input and
 * a single output texture. */
static void
_RGB_to_RGB (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
  const gchar *in_format_str = gst_video_format_to_string (in_format);
  GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
  const gchar *out_format_str = gst_video_format_to_string (out_format);
  /* pixel_order maps output channels to input swizzle characters */
  gchar *pixel_order = _RGB_pixel_order (in_format_str, out_format_str);
  gchar *alpha = NULL;

  info->in_n_textures = 1;
  info->out_n_textures = 1;

  if (_is_RGBx (in_format)) {
    int i;
    char input_alpha_channel = 'a';
    /* Find which position of the format name holds the padding ('X'/'x')
     * channel and force that component to fully opaque in the shader,
     * since the padding byte carries undefined data.
     * NOTE(review): iterates up to GST_VIDEO_MAX_PLANES characters of the
     * format name — assumes RGBx-style names are at least that long. */
    for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
      if (in_format_str[i] == 'X' || in_format_str[i] == 'x') {
        input_alpha_channel = _index_to_shader_swizzle (i);
        break;
      }
    }
    alpha = g_strdup_printf ("t.%c = 1.0;", input_alpha_channel);
  }

  /* Instantiate the reorder fragment shader with the optional alpha
   * override statement and the four output swizzle characters. */
  info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
      pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
  info->shader_tex_names[0] = "tex";

  g_free (alpha);
  g_free (pixel_order);
}
/* Configure the RGB->GRAY conversion: replicates the first input channel into
 * R, G and B of the output via the reorder fragment shader.  Only GRAY8
 * output is handled; other output formats leave frag_prog unset. */
static void
_RGB_to_GRAY (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
  const gchar *in_format_str = gst_video_format_to_string (in_format);
  /* Normalize the input channel order against plain "rgba" */
  gchar *pixel_order = _RGB_pixel_order (in_format_str, "rgba");
  gchar *alpha = NULL;

  info->in_n_textures = 1;
  info->out_n_textures = 1;
  info->shader_tex_names[0] = "tex";

  /* Padding channel of RGBx-like inputs carries undefined data; force it
   * to opaque in the shader. */
  if (_is_RGBx (in_format))
    alpha = g_strdup_printf ("t.%c = 1.0;", pixel_order[3]);

  switch (GST_VIDEO_INFO_FORMAT (&convert->out_info)) {
    case GST_VIDEO_FORMAT_GRAY8:
      /* Replicate the first channel into R, G and B */
      info->frag_prog = g_strdup_printf (frag_REORDER, alpha ? alpha : "",
          pixel_order[0], pixel_order[0], pixel_order[0], pixel_order[3]);
      break;
    default:
      break;
  }

  g_free (alpha);
  g_free (pixel_order);
}
/* Fill in the GL format and texture type for @format on @meta.
 * Only RGBA and BGRA are supported; any other format logs an error.
 * Returns: TRUE on success, FALSE for unsupported formats. */
static gboolean
meta_texture_ensure_format (GstVaapiVideoMetaTexture * meta,
    GstVideoFormat format)
{
  if (format == GST_VIDEO_FORMAT_RGBA) {
    meta->gl_format = GL_RGBA;
    meta->texture_type = GST_VIDEO_GL_TEXTURE_TYPE_RGBA;
    return TRUE;
  }

  if (format == GST_VIDEO_FORMAT_BGRA) {
    meta->gl_format = GL_BGRA_EXT;
    /* FIXME: add GST_VIDEO_GL_TEXTURE_TYPE_BGRA extension */
    meta->texture_type = GST_VIDEO_GL_TEXTURE_TYPE_RGBA;
    return TRUE;
  }

  GST_ERROR ("unsupported texture format %s",
      gst_video_format_to_string (format));
  return FALSE;
}
/* One-time setup of the download path: configures the colour converter to
 * turn internal RGBA textures into the caller's output video format.
 * Returns TRUE on success (or if already initialized), FALSE when the
 * format cannot be downloaded on this GL context. */
static gboolean
_init_download (GstGLDownload * download)
{
  GstVideoFormat v_format;
  guint out_width, out_height;
  GstVideoInfo in_info;

  v_format = GST_VIDEO_INFO_FORMAT (&download->info);
  out_width = GST_VIDEO_INFO_WIDTH (&download->info);
  out_height = GST_VIDEO_INFO_HEIGHT (&download->info);

  if (download->initted)
    return TRUE;

  GST_TRACE ("initializing texture download for format %s",
      gst_video_format_to_string (v_format));

  if (USING_GLES2 (download->context) && !USING_GLES3 (download->context)) {
    /* GL_RGBA is the only officially supported texture format in GLES2 */
    if (v_format == GST_VIDEO_FORMAT_RGB || v_format == GST_VIDEO_FORMAT_BGR) {
      gst_gl_context_set_error (download->context, "Cannot download RGB "
          "textures in GLES2");
      return FALSE;
    }
  }

  /* The GL side always produces RGBA at the output dimensions; the converter
   * handles RGBA -> download->info. */
  gst_video_info_set_format (&in_info, GST_VIDEO_FORMAT_RGBA, out_width,
      out_height);

  gst_gl_color_convert_set_format (download->convert, &in_info,
      &download->info);

  return TRUE;
}
/* Build template caps for a single raw video @format with unconstrained
 * width, height and framerate ranges. */
static GstCaps *
_gst_video_format_new_template_caps (GstVideoFormat format)
{
  const gchar *format_name = gst_video_format_to_string (format);

  return gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, format_name,
      "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
      NULL);
}
static guint get_num_formats (void) { guint i = 2; while (gst_video_format_to_string ((GstFormat) i) != NULL) ++i; return i; }
/* In-place transform: wraps the frame's pixel data in a cairo surface and
 * emits the "draw" signal so the application can paint onto the frame.
 * Returns GST_FLOW_ERROR when the video format has no cairo equivalent or
 * surface/context creation fails. */
static GstFlowReturn
gst_cairo_overlay_transform_frame_ip (GstVideoFilter * vfilter,
    GstVideoFrame * frame)
{
  GstCairoOverlay *overlay = GST_CAIRO_OVERLAY (vfilter);
  cairo_surface_t *surface;
  cairo_t *cr;
  cairo_format_t format;

  /* Map the GStreamer pixel format onto the matching cairo format.
   * Only formats with an exact byte-layout match are supported. */
  switch (GST_VIDEO_FRAME_FORMAT (frame)) {
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
      format = CAIRO_FORMAT_ARGB32;
      break;
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
      format = CAIRO_FORMAT_RGB24;
      break;
    case GST_VIDEO_FORMAT_RGB16:
      format = CAIRO_FORMAT_RGB16_565;
      break;
    default:
    {
      GST_WARNING ("No matching cairo format for %s",
          gst_video_format_to_string (GST_VIDEO_FRAME_FORMAT (frame)));
      return GST_FLOW_ERROR;
    }
  }

  /* Zero-copy: cairo draws directly into the frame's first plane. */
  surface =
      cairo_image_surface_create_for_data (GST_VIDEO_FRAME_PLANE_DATA (frame,
          0), format, GST_VIDEO_FRAME_WIDTH (frame),
      GST_VIDEO_FRAME_HEIGHT (frame), GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0));

  if (G_UNLIKELY (!surface))
    return GST_FLOW_ERROR;

  cr = cairo_create (surface);
  if (G_UNLIKELY (!cr)) {
    cairo_surface_destroy (surface);
    return GST_FLOW_ERROR;
  }

  /* Hand the cairo context plus buffer timing to the application. */
  g_signal_emit (overlay, gst_cairo_overlay_signals[SIGNAL_DRAW], 0,
      cr, GST_BUFFER_PTS (frame->buffer), GST_BUFFER_DURATION (frame->buffer),
      NULL);

  cairo_destroy (cr);
  cairo_surface_destroy (surface);

  return GST_FLOW_OK;
}
/* Initialize @value as a G_TYPE_STRING holding the name of @format.
 * Returns: TRUE on success, FALSE if the format has no string name. */
gboolean
gst_vaapi_value_set_format (GValue * value, GstVideoFormat format)
{
  const gchar *name = gst_video_format_to_string (format);

  if (name == NULL)
    return FALSE;

  g_value_init (value, G_TYPE_STRING);
  g_value_set_string (value, name);
  return TRUE;
}
/* Allocate a GstMemory wrapping a DroidMediaBuffer created from raw @data.
 * Only YV12 and NV21 are mapped to HAL pixel formats; other formats fail.
 * Returns: (transfer full) a new GstMemory, or NULL on failure (ownership of
 * @data/@cb handling on failure is up to droidmedia — TODO confirm). */
GstMemory *
gst_droid_media_buffer_allocator_alloc_from_data (GstAllocator * allocator,
    GstVideoInfo * info, DroidMediaData * data, DroidMediaBufferCallbacks * cb)
{
  GstDroidMediaBufferMemory *mem;
  DroidMediaBuffer *buffer;
  int format;
  GstDroidMediaBufferAllocator *alloc;

  if (!GST_IS_DROID_MEDIA_BUFFER_ALLOCATOR (allocator)) {
    GST_WARNING_OBJECT (allocator,
        "allocator is not the correct allocator for droidmediabuffer");
    return NULL;
  }

  alloc = (GstDroidMediaBufferAllocator *) allocator;

  /* Translate the GStreamer format to the platform HAL pixel format. */
  if (info->finfo->format == GST_VIDEO_FORMAT_YV12) {
    format = alloc->c.HAL_PIXEL_FORMAT_YV12;
  } else if (info->finfo->format == GST_VIDEO_FORMAT_NV21) {
    format = alloc->c.HAL_PIXEL_FORMAT_YCrCb_420_SP;
  } else {
    GST_WARNING_OBJECT (allocator, "Unknown GStreamer format %s",
        gst_video_format_to_string (info->finfo->format));
    return NULL;
  }

  mem = g_slice_new0 (GstDroidMediaBufferMemory);

  buffer = droid_media_buffer_create_from_raw_data (info->width, info->height,
      GST_VIDEO_INFO_COMP_STRIDE (info, 0),
      GST_VIDEO_INFO_COMP_STRIDE (info, 1), format, data, cb);
  if (!buffer) {
    GST_ERROR_OBJECT (allocator, "failed to acquire media buffer");
    /* Release the slice allocated above; nothing else was acquired yet. */
    g_slice_free (GstDroidMediaBufferMemory, mem);
    return NULL;
  }

  mem->buffer = buffer;

  /* Size/offset fields are all 0: the memory is not mappable and is only a
   * handle to the underlying droid buffer. */
  gst_memory_init (GST_MEMORY_CAST (mem),
      GST_MEMORY_FLAG_NO_SHARE | GST_MEMORY_FLAG_NOT_MAPPABLE, allocator, NULL,
      0, 0, 0, 0);

  GST_DEBUG_OBJECT (allocator, "alloc %p", mem);

  return GST_MEMORY_CAST (mem);
}
/* Query the driver's current capture format via VIDIOC_G_FMT and build
 * matching fixed caps.  Returns NULL if the ioctl fails. */
static GstCaps *gst_imx_v4l2src_caps_for_current_setup(GstImxV4l2VideoSrc *v4l2src)
{
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return NULL;
	}

	switch (fmt.fmt.pix.pixelformat)
	{
		case V4L2_PIX_FMT_YUV420:
			/* Special Case for handling YU12 */
			pixel_format = "I420";
			break;
		case V4L2_PIX_FMT_YUYV:
			/* Special Case for handling YUYV */
			pixel_format = "YUY2";
			break;
		default:
			/* Generic path: map the fourcc through the GStreamer helper.
			 * NOTE(review): if the fourcc is unknown, gst_fmt is UNKNOWN and
			 * pixel_format may end up NULL/"UNKNOWN" — verify downstream. */
			gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
			pixel_format = gst_video_format_to_string(gst_fmt);
	}

	/* Some TV-in drivers report field == 0 (ANY); force interlaced. */
	if (v4l2src->is_tvin && !fmt.fmt.pix.field)
	{
		fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
		GST_DEBUG_OBJECT(v4l2src, "TV decoder fix up: field = V4L2_FIELD_INTERLACED");
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED ||
	    fmt.fmt.pix.field == V4L2_FIELD_INTERLACED_TB ||
	    fmt.fmt.pix.field == V4L2_FIELD_INTERLACED_BT)
	{
		interlace_mode = "interleaved";
	}

	return gst_caps_new_simple("video/x-raw",
		"format", G_TYPE_STRING, pixel_format,
		"width", G_TYPE_INT, v4l2src->capture_width,
		"height", G_TYPE_INT, v4l2src->capture_height,
		"interlace-mode", G_TYPE_STRING, interlace_mode,
		"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
		"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
		NULL);
}
/* Unit-test helper: verify that @tmpl's caps advertise every known raw
 * video format.  Any format missing from the template's "format" list is
 * reported via g_error(). */
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  /* One flag per possible GstVideoFormat value, indexed by enum value. */
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 1);
  s = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw"));

  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  /* Mark every format the template advertises. */
  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    fail_unless (fmt != GST_VIDEO_FORMAT_UNKNOWN);
    formats_supported[(guint) fmt] = TRUE;
  }

  gst_caps_unref (caps);

  /* Start at 2: skips UNKNOWN (0) and ENCODED (1), matching get_num_formats. */
  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      g_error ("videoconvert doesn't support format '%s'",
          gst_video_format_to_string ((GstVideoFormat) i));
    }
  }

  g_free (formats_supported);
}
/* Create new "video/x-raw" template caps carrying the name of @format.
 * Returns: (transfer full) the new caps, or NULL on failure or if @format
 * is GST_VIDEO_FORMAT_UNKNOWN. */
GstCaps *
gst_vaapi_video_format_new_template_caps (GstVideoFormat format)
{
  GstCaps *caps;

  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);

  caps = gst_caps_new_empty_simple ("video/x-raw");
  if (caps == NULL)
    return NULL;

  gst_caps_set_simple (caps,
      "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
  set_video_template_caps (caps);

  return caps;
}
/* Instance init: set up the sink's lock and its fixed RGBx template caps
 * (full width/height/framerate ranges). */
static void
grd_vnc_sink_init (GrdVncSink *vnc_sink)
{
  g_mutex_init (&vnc_sink->lock);

  vnc_sink->caps =
    gst_caps_new_simple ("video/x-raw",
                         "format", G_TYPE_STRING,
                         gst_video_format_to_string (GST_VIDEO_FORMAT_RGBx),
                         "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                         "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
                         "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
                         G_MAXINT, 1,
                         NULL);
}
/* Append one "video/x-raw" structure to @caps for every entry in the
 * fourcc -> GstVideoFormat map, with bounded width/height ranges and a
 * 0/1 .. 100/1 framerate range. */
void caps_append(GstCaps *caps)
{
  std::map<uint32_t, GstVideoFormat>::iterator it;

  for (it = fourcc2fmt.begin(); it != fourcc2fmt.end(); ++it) {
    GstStructure *s = gst_structure_new ("video/x-raw",
        "format", G_TYPE_STRING, gst_video_format_to_string (it->second),
        NULL);

    gst_structure_set (s,
        "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
        "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
        NULL);

    gst_caps_append_structure (caps, s);
  }
}
/* If the Vulkan @format maps onto a GstVideoFormat, append its string name
 * to the GValue @list; unknown formats are silently skipped. */
static void
_add_vk_format_to_list (GValue * list, VkFormat format)
{
  GstVideoFormat video_format;
  GValue entry = G_VALUE_INIT;

  video_format = _vk_format_to_video_format (format);
  if (!video_format)
    return;

  g_value_init (&entry, G_TYPE_STRING);
  g_value_set_string (&entry, gst_video_format_to_string (video_format));
  gst_value_list_append_value (list, &entry);
  g_value_unset (&entry);
}
/* Negotiate source caps from the driver's current capture setup.  The format
 * was already configured earlier, so this just reads it back via
 * VIDIOC_G_FMT and pushes the matching fixed caps downstream. */
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	switch (fmt.fmt.pix.pixelformat)
	{
		case V4L2_PIX_FMT_YUV420:
			/* Special Case for handling YU12 */
			pixel_format = "I420";
			break;
		case V4L2_PIX_FMT_YUYV:
			/* Special Case for handling YUYV */
			pixel_format = "YUY2";
			break;
		default:
			/* Generic path: map the fourcc through the GStreamer helper.
			 * NOTE(review): unknown fourccs yield UNKNOWN here — verify
			 * the resulting caps are still valid downstream. */
			gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
			pixel_format = gst_video_format_to_string(gst_fmt);
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED)
		interlace_mode = "interleaved";

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_caps_new_simple("video/x-raw",
		"format", G_TYPE_STRING, pixel_format,
		"width", G_TYPE_INT, v4l2src->capture_width,
		"height", G_TYPE_INT, v4l2src->capture_height,
		"interlace-mode", G_TYPE_STRING, interlace_mode,
		"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
		"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
		NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	return gst_base_src_set_caps(src, caps);
}
/* Allocate a GstMemory wrapping a DroidMediaBuffer created from raw @data,
 * using the shared gst_droid_media_buffer_formats table for the format
 * mapping.  Returns: (transfer full) a new GstMemory, or NULL on failure. */
GstMemory *
gst_droid_media_buffer_allocator_alloc_from_data (GstAllocator * allocator,
    GstVideoInfo * info, DroidMediaData * data, DroidMediaBufferCallbacks * cb)
{
  GstDroidMediaBufferMemory *mem;
  DroidMediaBuffer *buffer;
  DroidMediaBufferInfo droid_info;
  int format_index;

  if (!GST_IS_DROID_MEDIA_BUFFER_ALLOCATOR (allocator)) {
    GST_WARNING_OBJECT (allocator,
        "allocator is not the correct allocator for droidmediabuffer");
    return NULL;
  }

  /* Look up the HAL format via the format table; COUNT means "not found". */
  format_index =
      gst_droid_media_buffer_index_of_gst_format (info->finfo->format);
  if (format_index == GST_DROID_MEDIA_BUFFER_FORMAT_COUNT) {
    GST_WARNING_OBJECT (allocator, "Unknown GStreamer format %s",
        gst_video_format_to_string (info->finfo->format));
    return NULL;
  }

  buffer = droid_media_buffer_create_from_raw_data (info->width, info->height,
      GST_VIDEO_INFO_COMP_STRIDE (info, 0),
      GST_VIDEO_INFO_COMP_STRIDE (info, 1),
      gst_droid_media_buffer_formats[format_index].hal_format, data, cb);
  if (!buffer) {
    GST_ERROR_OBJECT (allocator, "failed to acquire media buffer");
    return NULL;
  }

  /* The buffer's actual stride may differ from the requested one; query it
   * back and let the wrapper record it. */
  droid_media_buffer_get_info (buffer, &droid_info);

  mem = gst_droid_media_buffer_allocator_alloc_from_buffer (allocator, buffer,
      format_index, info->width, info->height, droid_info.stride);

  GST_DEBUG_OBJECT (allocator, "alloc %p", mem);

  return GST_MEMORY_CAST (mem);
}
/* Configure the GRAY->RGB conversion: GRAY8 replicates the luma channel;
 * GRAY16 recombines the two bytes in the shader (frag_COMPOSE), whose byte
 * order depends on endianness and on whether RG textures are available. */
static void
_GRAY_to_RGB (GstGLColorConvert * convert)
{
  struct ConvertInfo *info = &convert->priv->convert_info;
  GstVideoFormat out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
  const gchar *out_format_str = gst_video_format_to_string (out_format);
  gchar *pixel_order = _RGB_pixel_order ("rgba", out_format_str);
#if GST_GL_HAVE_PLATFORM_EAGL
  /* RG textures are not used on EAGL (iOS) builds. */
  gboolean texture_rg = FALSE;
#else
  gboolean texture_rg =
      gst_gl_context_check_feature (convert->context, "GL_EXT_texture_rg")
      || gst_gl_context_check_feature (convert->context, "GL_ARB_texture_rg");
#endif

  info->in_n_textures = 1;
  info->out_n_textures = 1;
  info->shader_tex_names[0] = "tex";

  switch (GST_VIDEO_INFO_FORMAT (&convert->in_info)) {
    case GST_VIDEO_FORMAT_GRAY8:
      /* Replicate the single luma channel into R, G and B. */
      info->frag_prog = g_strdup_printf (frag_REORDER, "", pixel_order[0],
          pixel_order[0], pixel_order[0], pixel_order[3]);
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    {
      /* Second byte lands in 'g' with RG textures, 'a' with luma/alpha. */
      char val2 = texture_rg ? 'g' : 'a';
      info->frag_prog = g_strdup_printf (frag_COMPOSE, val2, 'r',
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      break;
    }
    case GST_VIDEO_FORMAT_GRAY16_BE:
    {
      /* Big endian: byte roles swapped relative to GRAY16_LE. */
      char val2 = texture_rg ? 'g' : 'a';
      info->frag_prog = g_strdup_printf (frag_COMPOSE, 'r', val2,
          pixel_order[0], pixel_order[1], pixel_order[2], pixel_order[3]);
      break;
    }
    default:
      break;
  }

  g_free (pixel_order);
}
/* Qt event dispatcher for the sink delegate: handles custom buffer,
 * buffer-format and deactivate events posted from the streaming thread;
 * everything else is forwarded to QObject::event().
 *
 * Fix: '"Received buffer %"GST_PTR_FORMAT' had no space between the string
 * literal and the macro — ill-formed in C++11, where a suffix glued to a
 * literal is parsed as a user-defined literal. */
bool BaseDelegate::event(QEvent *event)
{
  switch((int) event->type()) {
  case BufferEventType:
  {
    BufferEvent *bufEvent = dynamic_cast<BufferEvent*>(event);
    Q_ASSERT(bufEvent);

    GST_TRACE_OBJECT(m_sink, "Received buffer %" GST_PTR_FORMAT,
        bufEvent->buffer);

    /* Only take the new buffer while rendering is active. */
    if (isActive()) {
      gst_buffer_replace (&m_buffer, bufEvent->buffer);
      update();
    }
    return true;
  }
  case BufferFormatEventType:
  {
    BufferFormatEvent *bufFmtEvent = dynamic_cast<BufferFormatEvent*>(event);
    Q_ASSERT(bufFmtEvent);

    GST_TRACE_OBJECT (m_sink, "Received buffer format event. New format: %s",
        gst_video_format_to_string(bufFmtEvent->format.videoFormat()));

    /* Defer applying the format until the next paint. */
    m_formatDirty = true;
    m_bufferFormat = bufFmtEvent->format;
    return true;
  }
  case DeactivateEventType:
  {
    GST_LOG_OBJECT(m_sink, "Received deactivate event");

    /* Drop the last buffer and trigger a repaint to clear the view. */
    g_clear_pointer(&m_buffer, gst_buffer_unref);
    update();
    return true;
  }
  default:
    return QObject::event(event);
  }
}
/* Build self->allowed_caps from the DRM plane's supported pixel formats,
 * one caps structure per format the GStreamer side also knows about.
 * Idempotent: returns immediately if the caps were already built.
 * Returns FALSE only if the initial empty caps allocation fails. */
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModePlane * plane, drmModeRes * res)
{
  GstCaps *out_caps, *caps;
  int i;
  GstVideoFormat fmt;
  const gchar *format;

  if (self->allowed_caps)
    return TRUE;

  out_caps = gst_caps_new_empty ();
  if (!out_caps)
    return FALSE;

  for (i = 0; i < plane->count_formats; i++) {
    fmt = gst_video_format_from_drm (plane->formats[i]);
    if (fmt == GST_VIDEO_FORMAT_UNKNOWN) {
      /* DRM format with no GStreamer equivalent: skip, but log it. */
      GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (plane->formats[i]));
      continue;
    }

    format = gst_video_format_to_string (fmt);
    /* Width/height ranges come from the DRM resources' limits. */
    caps = gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, format,
        "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
        "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!caps)
      continue;

    /* gst_caps_merge takes ownership of both caps. */
    out_caps = gst_caps_merge (out_caps, caps);
  }

  self->allowed_caps = gst_caps_simplify (out_caps);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return TRUE;
}
/* Called in the gl thread */
/* One-time setup of the upload path: configures the colour converter to turn
 * the input format into RGBA and wraps an output texture placeholder.
 * Requires shader support (CreateProgram/CreateProgramObject) since YUV
 * conversion is done in GLSL.  Returns FALSE and sets a context error when
 * shaders are unavailable. */
static gboolean
_init_upload (GstGLUpload * upload)
{
  GstGLFuncs *gl;
  GstVideoFormat v_format;
  GstVideoInfo out_info;

  gl = upload->context->gl_vtable;

  v_format = GST_VIDEO_INFO_FORMAT (&upload->in_info);

  GST_INFO ("Initializing texture upload for format:%s",
      gst_video_format_to_string (v_format));

  if (!gl->CreateProgramObject && !gl->CreateProgram) {
    gst_gl_context_set_error (upload->context,
        "Cannot upload YUV formats without OpenGL shaders");
    goto error;
  }

  /* Output is always RGBA at the input dimensions. */
  gst_video_info_set_format (&out_info, GST_VIDEO_FORMAT_RGBA,
      GST_VIDEO_INFO_WIDTH (&upload->in_info),
      GST_VIDEO_INFO_HEIGHT (&upload->in_info));

  gst_gl_color_convert_set_format (upload->convert, &upload->in_info,
      &out_info);

  /* Texture id 0 for now: the real texture is attached later per-frame. */
  upload->out_tex = gst_gl_memory_wrapped_texture (upload->context, 0,
      GST_VIDEO_GL_TEXTURE_TYPE_RGBA,
      GST_VIDEO_INFO_WIDTH (&upload->in_info),
      GST_VIDEO_INFO_HEIGHT (&upload->in_info), NULL, NULL);

  upload->initted = TRUE;

  return TRUE;

error:
  return FALSE;
}
/* Map a GstVideoFormat to the corresponding IPU V4L pixel format constant.
 * Returns 0 (and logs a warning) for formats the IPU does not support. */
static guint32 gst_imx_ipu_blitter_get_v4l_format(GstVideoFormat format)
{
	switch (format)
	{
		/* These formats are defined in ipu.h , but the IPU reports them as
		 * being unsupported.
		 * TODO: It is currently not known how to find out which formats are supported,
		 * or if different i.MX versions support different formats.
		 */
#if 0
		case GST_VIDEO_FORMAT_RGB15: return IPU_PIX_FMT_RGB555;
		case GST_VIDEO_FORMAT_GBR: return IPU_PIX_FMT_GBR24;
		case GST_VIDEO_FORMAT_YVYU: return IPU_PIX_FMT_YVYU;
		case GST_VIDEO_FORMAT_IYU1: return IPU_PIX_FMT_Y41P;
		case GST_VIDEO_FORMAT_GRAY8: return IPU_PIX_FMT_GREY;
		case GST_VIDEO_FORMAT_YVU9: return IPU_PIX_FMT_YVU410P;
		case GST_VIDEO_FORMAT_YUV9: return IPU_PIX_FMT_YUV410P;
#endif
		/* Packed RGB variants */
		case GST_VIDEO_FORMAT_RGB16: return IPU_PIX_FMT_RGB565;
		case GST_VIDEO_FORMAT_BGR: return IPU_PIX_FMT_BGR24;
		case GST_VIDEO_FORMAT_RGB: return IPU_PIX_FMT_RGB24;
		case GST_VIDEO_FORMAT_BGRx: return IPU_PIX_FMT_BGR32;
		case GST_VIDEO_FORMAT_BGRA: return IPU_PIX_FMT_BGRA32;
		case GST_VIDEO_FORMAT_RGBx: return IPU_PIX_FMT_RGB32;
		case GST_VIDEO_FORMAT_RGBA: return IPU_PIX_FMT_RGBA32;
		case GST_VIDEO_FORMAT_ABGR: return IPU_PIX_FMT_ABGR32;
		/* Packed and planar YUV variants */
		case GST_VIDEO_FORMAT_UYVY: return IPU_PIX_FMT_UYVY;
		case GST_VIDEO_FORMAT_v308: return IPU_PIX_FMT_YUV444;
		case GST_VIDEO_FORMAT_NV12: return IPU_PIX_FMT_NV12;
		case GST_VIDEO_FORMAT_YV12: return IPU_PIX_FMT_YVU420P;
		case GST_VIDEO_FORMAT_I420: return IPU_PIX_FMT_YUV420P;
		case GST_VIDEO_FORMAT_Y42B: return IPU_PIX_FMT_YUV422P;
		case GST_VIDEO_FORMAT_Y444: return IPU_PIX_FMT_YUV444P;
		default:
			GST_WARNING("Unknown format %d (%s)", (gint)format, gst_video_format_to_string(format));
			return 0;
	}
}
/* Map a GstVideoFormat onto the OpenCV matrix element type (*cv_type):
 * gray8 -> CV_8UC1, 24-bit RGB/BGR -> CV_8UC3, 32-bit RGB variants ->
 * CV_8UC4, gray16 -> CV_16UC1.  On unsupported formats, sets @err and
 * returns FALSE. */
gboolean
gst_opencv_cv_image_type_from_video_format (GstVideoFormat format,
    int *cv_type, GError ** err)
{
  const gchar *format_str;

  switch (format) {
    case GST_VIDEO_FORMAT_GRAY8:
      *cv_type = CV_8UC1;
      break;
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
      *cv_type = CV_8UC3;
      break;
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
      *cv_type = CV_8UC4;
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      *cv_type = CV_16UC1;
      break;
    default:
      format_str = gst_video_format_to_string (format);
      g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
          "Unsupported video format %s", format_str);
      return FALSE;
  }

  return TRUE;
}
/**
 * gst_video_info_to_caps:
 * @info: a #GstVideoInfo
 *
 * Convert the values of @info into a #GstCaps.
 *
 * Note: may modify @info — the HALF_ASPECT multiview flag is cleared after
 * it has been folded into the pixel-aspect-ratio.
 *
 * Returns: a new #GstCaps containing the info of @info.
 */
GstCaps *
gst_video_info_to_caps (GstVideoInfo * info)
{
  GstCaps *caps;
  const gchar *format;
  gchar *color;
  gint par_n, par_d;
  GstVideoColorimetry colorimetry;

  g_return_val_if_fail (info != NULL, NULL);
  g_return_val_if_fail (info->finfo != NULL, NULL);
  g_return_val_if_fail (info->finfo->format != GST_VIDEO_FORMAT_UNKNOWN, NULL);

  format = gst_video_format_to_string (info->finfo->format);
  g_return_val_if_fail (format != NULL, NULL);

  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, format,
      "width", G_TYPE_INT, info->width,
      "height", G_TYPE_INT, info->height, NULL);

  par_n = info->par_n;
  par_d = info->par_d;

  gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING,
      gst_video_interlace_mode_to_string (info->interlace_mode), NULL);

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
    const gchar *caps_str = NULL;

    /* If the half-aspect flag is set, applying it into the PAR of the
     * resulting caps now seems safe, and helps with automatic behaviour
     * in elements that aren't explicitly multiview aware */
    if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) &
        GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT) {
      GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) &=
          ~GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
      switch (GST_VIDEO_INFO_MULTIVIEW_MODE (info)) {
        case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
        case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX:
        case GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED:
        case GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD:
          par_n *= 2;           /* double the width / half the height */
          break;
        case GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED:
        case GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM:
          par_d *= 2;           /* half the width / double the height */
          break;
        default:
          break;
      }
    }

    caps_str =
        gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
        (info));
    if (caps_str != NULL) {
      gst_caps_set_simple (caps, "multiview-mode", G_TYPE_STRING,
          caps_str, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
          GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT,
          NULL);
    }
  }

  gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
      par_n, par_d, NULL);

  if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN)
    gst_caps_set_simple (caps, "chroma-site", G_TYPE_STRING,
        gst_video_chroma_to_string (info->chroma_site), NULL);

  /* make sure we set the RGB matrix for RGB formats */
  colorimetry = info->colorimetry;
  if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) &&
      colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
    GST_WARNING ("invalid matrix %d for RGB format, using RGB",
        colorimetry.matrix);
    colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
  }

  if ((color = gst_video_colorimetry_to_string (&colorimetry))) {
    gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, color, NULL);
    g_free (color);
  }

  if (info->views > 1)
    gst_caps_set_simple (caps, "views", G_TYPE_INT, info->views, NULL);

  if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
    /* variable fps with a max-framerate */
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, 0, 1,
        "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
  } else {
    /* no variable fps or no max-framerate */
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
        info->fps_n, info->fps_d, NULL);
  }

  return caps;
}
/* Allocate a GstBuffer backed by an EGLImage for @format (currently only
 * RGBA is implemented).  First tries the EGLImage allocator directly; if
 * that fails, falls back to creating a GL texture and wrapping it via
 * eglCreateImageKHR.  Returns NULL on any GL/EGL error. */
static GstBuffer *
gst_egl_allocate_eglimage (APP_STATE_T * ctx, GstAllocator * allocator,
    GstVideoFormat format, gint width, gint height)
{
  GstEGLGLESImageData *data = NULL;
  GstBuffer *buffer;
  GstVideoInfo info;
  gint i;
  gint stride[3];
  gsize offset[3];
  GstMemory *mem[3] = { NULL, NULL, NULL };
  guint n_mem;
  GstMemoryFlags flags = 0;

  memset (stride, 0, sizeof (stride));
  memset (offset, 0, sizeof (offset));

  if (!gst_egl_image_memory_is_mappable ())
    flags |= GST_MEMORY_FLAG_NOT_MAPPABLE;
  /* See https://bugzilla.gnome.org/show_bug.cgi?id=695203 */
  flags |= GST_MEMORY_FLAG_NO_SHARE;

  gst_video_info_set_format (&info, format, width, height);

  GST_DEBUG ("Allocating EGL Image format %s width %d height %d",
      gst_video_format_to_string (format), width, height);

  switch (format) {
    case GST_VIDEO_FORMAT_RGBA:{
      gsize size;
      EGLImageKHR image;

      /* Preferred path: let the EGLImage allocator produce the memory. */
      mem[0] = gst_egl_image_allocator_alloc (allocator, ctx->gst_display,
          GST_VIDEO_GL_TEXTURE_TYPE_RGBA, GST_VIDEO_INFO_WIDTH (&info),
          GST_VIDEO_INFO_HEIGHT (&info), &size);
      if (mem[0]) {
        stride[0] = size / GST_VIDEO_INFO_HEIGHT (&info);
        n_mem = 1;
        GST_MINI_OBJECT_FLAG_SET (mem[0], GST_MEMORY_FLAG_NO_SHARE);
      } else {
        /* Fallback: create our own GL texture and wrap it in an EGLImage. */
        data = g_slice_new0 (GstEGLGLESImageData);

        /* 4 bytes per RGBA pixel, rows rounded up to a 4-byte boundary. */
        stride[0] = GST_ROUND_UP_4 (GST_VIDEO_INFO_WIDTH (&info) * 4);
        size = stride[0] * GST_VIDEO_INFO_HEIGHT (&info);

        glGenTextures (1, &data->texture);
        if (got_gl_error ("glGenTextures"))
          goto mem_error;

        glBindTexture (GL_TEXTURE_2D, data->texture);
        if (got_gl_error ("glBindTexture"))
          goto mem_error;

        /* Set 2D resizing params */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        /* If these are not set the texture image unit will return
         * (R, G, B, A) = black on glTexImage2D for non-POT width/height
         * frames. For a deeper explanation take a look at the OpenGL ES
         * documentation for glTexParameter */
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        if (got_gl_error ("glTexParameteri"))
          goto mem_error;

        glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA,
            GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), 0,
            GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        if (got_gl_error ("glTexImage2D"))
          goto mem_error;

        image = eglCreateImageKHR (gst_egl_display_get (ctx->gst_display),
            ctx->context, EGL_GL_TEXTURE_2D_KHR,
            (EGLClientBuffer) (guintptr) data->texture, NULL);
        if (got_egl_error ("eglCreateImageKHR"))
          goto mem_error;

        mem[0] = gst_egl_image_allocator_wrap (allocator, ctx->gst_display,
            image, GST_VIDEO_GL_TEXTURE_TYPE_RGBA, flags, size, data, NULL);
        n_mem = 1;
      }
    }
      break;
    default:
      goto mem_error;
      break;
  }

  buffer = gst_buffer_new ();
  gst_buffer_add_video_meta_full (buffer, 0, format, width, height,
      GST_VIDEO_INFO_N_PLANES (&info), offset, stride);

  /* n_mem could be reused for planar colorspaces, for now its == 1 for RGBA */
  for (i = 0; i < n_mem; i++)
    gst_buffer_append_memory (buffer, mem[i]);

  return buffer;

mem_error:
  {
    GST_ERROR ("Failed to create EGLImage");

    if (data)
      gst_egl_gles_image_data_free (data);

    if (mem[0])
      gst_memory_unref (mem[0]);

    return NULL;
  }
}
/* Recompute and install the decoder's source-pad caps from the decoded
 * stream info and the downstream-preferred caps feature, set up separate
 * allocation caps when the coded size differs from the display size, and
 * update the decoder latency.  Returns FALSE if negotiation is impossible. */
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstPad *const srcpad = GST_VIDEO_DECODER_SRC_PAD (vdec);
  GstCaps *allowed;
  GstVideoCodecState *state, *ref_state;
  GstVaapiCapsFeature feature;
  GstCapsFeatures *features;
  GstCaps *allocation_caps;
  GstVideoInfo *vi;
  GstVideoFormat format;
  GstClockTime latency;
  gint fps_d, fps_n;
  guint width, height;
  const gchar *format_str, *feature_str;

  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  /* Pick the caps feature (and possibly a different format) preferred
   * downstream; `format` may be rewritten by the call. */
  format = GST_VIDEO_INFO_FORMAT (&decode->decoded_info);
  allowed = gst_vaapidecode_get_allowed_srcpad_caps (decode);
  feature = gst_vaapi_find_preferred_caps_feature (srcpad, allowed, &format);
  gst_caps_unref (allowed);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

#if (!USE_GLX && !USE_EGL)
  /* This is a very pathological situation. Should not happen. */
  if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
    return FALSE;
#endif

  if ((feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY ||
          feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE) &&
      format != GST_VIDEO_INFO_FORMAT (&decode->decoded_info)) {
    GST_FIXME_OBJECT (decode, "validate if driver can convert from %s to %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
            (&decode->decoded_info)), gst_video_format_to_string (format));
  }

  /* Prefer the display size; fall back to the upstream-reported size. */
  width = decode->display_width;
  height = decode->display_height;

  if (!width || !height) {
    width = GST_VIDEO_INFO_WIDTH (&ref_state->info);
    height = GST_VIDEO_INFO_HEIGHT (&ref_state->info);
  }

  state = gst_video_decoder_set_output_state (vdec, format, width, height,
      ref_state);
  if (!state)
    return FALSE;

  if (GST_VIDEO_INFO_WIDTH (&state->info) == 0
      || GST_VIDEO_INFO_HEIGHT (&state->info) == 0) {
    gst_video_codec_state_unref (state);
    return FALSE;
  }

  vi = &state->info;
  state->caps = gst_video_info_to_caps (vi);

  switch (feature) {
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:{
      GstStructure *structure = gst_caps_get_structure (state->caps, 0);

      /* Remove chroma-site and colorimetry from src caps,
       * which is unnecessary on downstream if using VASurface */
      gst_structure_remove_fields (structure, "chroma-site", "colorimetry",
          NULL);

      feature_str = gst_vaapi_caps_feature_to_string (feature);
      features = gst_caps_features_new (feature_str, NULL);
      gst_caps_set_features (state->caps, 0, features);
      break;
    }
    default:
      break;
  }

  /* Allocation query is different from pad's caps */
  allocation_caps = NULL;
  if (GST_VIDEO_INFO_WIDTH (&decode->decoded_info) != width
      || GST_VIDEO_INFO_HEIGHT (&decode->decoded_info) != height) {
    /* Allocation must cover the full coded size, not the display size. */
    allocation_caps = gst_caps_copy (state->caps);
    format_str = gst_video_format_to_string (format);
    gst_caps_set_simple (allocation_caps,
        "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (&decode->decoded_info),
        "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (&decode->decoded_info),
        "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (decode, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
  }
  gst_caps_replace (&state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  gst_video_codec_state_unref (state);

  /* NOTE(review): vi points into `state`, which was just unreffed — this
   * relies on the decoder still holding a reference to the output state;
   * verify. */
  fps_n = GST_VIDEO_INFO_FPS_N (vi);
  fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
/* Wine DirectShow transform filter: accept a YUV input media type on the
 * input pin and rewrite the filter's output media type as top-down RGB24,
 * then connect the underlying GStreamer conversion element with matching
 * input/output caps. Returns S_OK on success (or for non-input pins),
 * E_FAIL if the type is not connectable, otherwise the result of
 * Gstreamer_transform_ConnectInput(). */
static HRESULT WINAPI Gstreamer_YUV_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE *amt)
{
    GstTfImpl *This = (GstTfImpl*)tf;
    GstCaps *capsin, *capsout;
    AM_MEDIA_TYPE *outpmt = &This->tf.pmt;
    HRESULT hr;
    /* NOTE(review): AvgTimePerFrame is a 64-bit REFERENCE_TIME; storing it
     * in an int truncates — presumably harmless for realistic frame
     * durations, but worth confirming */
    int avgtime;
    LONG width, height;

    TRACE("%p 0x%x %p\n", This, dir, amt);

    mark_wine_thread();

    /* Only the input pin's media type drives (re)negotiation */
    if (dir != PINDIR_INPUT)
        return S_OK;

    if (Gstreamer_YUV_QueryConnect(&This->tf, amt) == S_FALSE || !amt->pbFormat)
        return E_FAIL;

    /* Start the output type as a copy of the input, then patch it to RGB24 */
    FreeMediaType(outpmt);
    CopyMediaType(outpmt, amt);

    if (IsEqualGUID(&amt->formattype, &FORMAT_VideoInfo)) {
        VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        /* Force a negative biHeight, i.e. a top-down DIB on the output */
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    } else {
        /* Identical rewrite for the VIDEOINFOHEADER2 layout; the format
         * type was already validated by QueryConnect above */
        VIDEOINFOHEADER2 *vih = (VIDEOINFOHEADER2*)outpmt->pbFormat;
        avgtime = vih->AvgTimePerFrame;
        width = vih->bmiHeader.biWidth;
        height = vih->bmiHeader.biHeight;
        if (vih->bmiHeader.biHeight > 0)
            vih->bmiHeader.biHeight = -vih->bmiHeader.biHeight;
        vih->bmiHeader.biBitCount = 24;
        vih->bmiHeader.biCompression = BI_RGB;
        vih->bmiHeader.biSizeImage = width * abs(height) * 3;
    }
    /* Default to 30 fps (in 100 ns units) when the source declares none */
    if (!avgtime)
        avgtime = 10000000 / 30;

    outpmt->subtype = MEDIASUBTYPE_RGB24;

    /* Input caps: the YUV fourcc is taken directly from the subtype GUID's
     * first dword (Data1) */
    capsin = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING,
                                   gst_video_format_to_string(
                                     gst_video_format_from_fourcc(amt->subtype.Data1)),
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                 NULL);
    /* Output caps: GStreamer "BGR" matches DirectShow RGB24 byte order */
    capsout = gst_caps_new_simple("video/x-raw",
                                  "format", G_TYPE_STRING, "BGR",
                                  "width", G_TYPE_INT, width,
                                  "height", G_TYPE_INT, height,
                                  "framerate", GST_TYPE_FRACTION, 10000000, avgtime,
                                  NULL);

    hr = Gstreamer_transform_ConnectInput(This, amt, capsin, capsout);
    gst_caps_unref(capsin);
    gst_caps_unref(capsout);

    /* NOTE(review): 4 bytes/pixel although the output is 24-bit — looks
     * like intentional slack; also height may still be negative here
     * (untouched copy of biHeight), which would make cbBuffer negative.
     * TODO confirm against callers. */
    This->cbBuffer = width * height * 4;
    return hr;
}
/* Return the caps the framebuffer device can render, derived from the
 * current fbdev mode (bits per pixel and per-channel masks), optionally
 * intersected with @filter. Returns NULL when the fbdev pixel layout
 * cannot be mapped to a GstVideoFormat. */
static GstCaps *
gst_fbdevsink_getcaps (GstBaseSink * bsink, GstCaps * filter)
{
  GstFBDEVSink *fbdevsink;
  GstVideoFormat format;
  GstCaps *caps;
  uint32_t rmask;
  uint32_t gmask;
  uint32_t bmask;
  uint32_t tmask;
  int endianness, depth, bpp;

  fbdevsink = GST_FBDEVSINK (bsink);

  caps = gst_static_pad_template_get_caps (&sink_template);

  /* FIXME: locking */
  if (!fbdevsink->framebuffer)
    goto done;

  bpp = fbdevsink->varinfo.bits_per_pixel;

  /* Build per-channel bit masks from the fbdev variable screen info */
  rmask = ((1 << fbdevsink->varinfo.red.length) - 1)
      << fbdevsink->varinfo.red.offset;
  gmask = ((1 << fbdevsink->varinfo.green.length) - 1)
      << fbdevsink->varinfo.green.offset;
  bmask = ((1 << fbdevsink->varinfo.blue.length) - 1)
      << fbdevsink->varinfo.blue.offset;
  tmask = ((1 << fbdevsink->varinfo.transp.length) - 1)
      << fbdevsink->varinfo.transp.offset;

  depth = fbdevsink->varinfo.red.length + fbdevsink->varinfo.green.length
      + fbdevsink->varinfo.blue.length;

  switch (fbdevsink->varinfo.bits_per_pixel) {
    case 32:
      /* swap endianness of masks */
      rmask = GUINT32_SWAP_LE_BE (rmask);
      gmask = GUINT32_SWAP_LE_BE (gmask);
      bmask = GUINT32_SWAP_LE_BE (bmask);
      tmask = GUINT32_SWAP_LE_BE (tmask);
      depth += fbdevsink->varinfo.transp.length;
      endianness = G_BIG_ENDIAN;
      break;
    case 24:{
      /* swap red and blue masks */
      tmask = rmask;
      rmask = bmask;
      bmask = tmask;
      tmask = 0;
      endianness = G_BIG_ENDIAN;
      break;
    }
    case 15:
    case 16:
      tmask = 0;
      endianness = G_LITTLE_ENDIAN;
      break;
    default:
      goto unsupported_bpp;
  }

  format = gst_video_format_from_masks (depth, bpp, endianness, rmask, gmask,
      bmask, tmask);

  if (format == GST_VIDEO_FORMAT_UNKNOWN)
    goto unknown_format;

  caps = gst_caps_make_writable (caps);
  gst_caps_set_simple (caps, "format", G_TYPE_STRING,
      gst_video_format_to_string (format), NULL);

done:
  if (filter != NULL) {
    GstCaps *icaps;

    icaps = gst_caps_intersect (caps, filter);
    gst_caps_unref (caps);
    caps = icaps;
  }

  return caps;

/* ERRORS */
unsupported_bpp:
  {
    GST_WARNING_OBJECT (bsink, "unsupported bit depth: %d", bpp);
    /* Fix: release the template caps reference taken above; the error
     * paths previously returned without dropping it (reference leak) */
    gst_caps_unref (caps);
    return NULL;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (bsink,
        "could not map fbdev format to GstVideoFormat: "
        "depth=%u, bpp=%u, endianness=%u, rmask=0x%08x, gmask=0x%08x, "
        "bmask=0x%08x, tmask=0x%08x", depth, bpp, endianness, rmask, gmask,
        bmask, tmask);
    gst_caps_unref (caps);
    return NULL;
  }
}
/*
 * Get Caps
 *
 * Builds the set of caps supported by the Android camera device: one
 * video/x-raw structure per supported pixel format, spanning the device's
 * resolution and fps ranges, then simplified to drop duplicates.
 * Returns NULL when the element is in (or below) the NULL state or when
 * querying the device fails; otherwise returns a new caps reference owned
 * by the caller.
 *
 * As can be seen this method violates the API between the GST element
 * and the Android device. Should be fixed... (FIXME)
 *
 */
static GstCaps *
gst_android_video_source_get_caps(GstBaseSrc * p_basesrc, GstCaps * p_filter)
{
    int i;
    int minFps;
    int maxFps;
    int fmtPos;
    int minWidth, minHeight;
    int maxWidth, maxHeight;
    GstCaps *caps;
    GstCaps *capsVec = NULL;

    GA_LOGTRACE("ENTER %s --xx--> thread(%ld)", __FUNCTION__, pthread_self());

    GstAndroidVideoSource *p_src = GST_ANDROIDVIDEOSOURCE(p_basesrc);

    /* Fix: "<= GST_STATE_NULL" already covers "== GST_STATE_NULL"; the
     * duplicated test was redundant */
    if (GST_STATE(p_src) <= GST_STATE_NULL) {
        GA_LOGINFO("%s: Called in state %s. Don't know device support yet. Will return NULL caps.",
            __FUNCTION__, gst_element_state_get_name(GST_STATE(p_src)));
        return NULL;
    }

    /* Query the device for its fps and resolution ranges */
    if (VCD_GetWidestFpsRange(p_src->m_devHandle, &minFps, &maxFps) != VCD_NO_ERROR) {
        return NULL;
    }
    if (VCD_NO_ERROR != VCD_GetMinResolution(p_src->m_devHandle, &minWidth, &minHeight)) {
        return NULL;
    }
    if (VCD_NO_ERROR != VCD_GetMaxResolution(p_src->m_devHandle, &maxWidth, &maxHeight)) {
        return NULL;
    }

    /* One caps structure per supported (and mappable) pixel format */
    capsVec = gst_caps_new_empty();
    for (fmtPos = 0; fmtPos < VCD_getMediaSupportFmtLen(p_src->m_devHandle); fmtPos++) {
        int fmt = VCD_getMediaSupportFmt(p_src->m_devHandle)[fmtPos];
        GstVideoFormat gstVideoFmt = vcd_int_to_gst_video_format(fmt);
        if (gstVideoFmt != GST_VIDEO_FORMAT_UNKNOWN) {
            caps = gst_caps_new_simple(
                "video/x-raw",
                "format", G_TYPE_STRING, gst_video_format_to_string(gstVideoFmt),
                "width", GST_TYPE_INT_RANGE, minWidth, maxWidth,
                "height", GST_TYPE_INT_RANGE, minHeight, maxHeight,
#ifdef ACCEPT_FPS_CAPS_DOWN_TO_1FPS
                "framerate", GST_TYPE_FRACTION_RANGE, 1000, ANDROID_FPS_DENOMINATOR, maxFps, ANDROID_FPS_DENOMINATOR,
#else
                "framerate", GST_TYPE_FRACTION_RANGE, minFps, ANDROID_FPS_DENOMINATOR, maxFps, ANDROID_FPS_DENOMINATOR,
#endif
                "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                NULL);
            gst_caps_append(capsVec, caps); /* append takes ownership of caps */
        }
    }

    // Some Android devices report one or more supported formats (or other stuff)
    // more than once, which gives caps duplicates. Those are removed by doing
    // gst_caps_do_simplify()...
    capsVec = gst_caps_simplify(capsVec);

    GA_LOGINFO("%s: By Android video device supported CAPS:", __FUNCTION__);
    GA_LOGINFO("%s:-----------------------------------------------------------", __FUNCTION__);
    for (i = 0; i < (int) gst_caps_get_size(capsVec); i++) {
        // Android log cannot print that long messages so we need to take one caps at a time
        GstCaps *capsCopy = gst_caps_copy_nth(capsVec, i);
        /* Fix: gst_caps_to_string() returns an allocated string that the
         * caller must g_free(); it was previously leaked every iteration */
        gchar *capsStr = gst_caps_to_string(capsCopy);
        GA_LOGINFO("CAPS%d: %s", i+1, capsStr);
        g_free(capsStr);
        gst_caps_unref(capsCopy);
    }
    GA_LOGINFO("%s:-----------------------------------------------------------", __FUNCTION__);

    GA_LOGTRACE("EXIT %s", __FUNCTION__);

    return capsVec;
}
/* Exhaustively test GL color conversion between every pair of frames in
 * @frames: each frame is wrapped as GL memory, converted to every other
 * frame's format, and the output pixels are compared against the expected
 * data (bytes equal to IGNORE_MAGIC are skipped on either side).
 * The wrapped-memory release callback (_frame_unref) decrements ref_count,
 * which must reach exactly 0 once each input buffer is fully released. */
static void
check_conversion (TestFrame * frames, guint size)
{
  gint i, j, k, l;
  gint ref_count = 0;

  for (i = 0; i < size; i++) {
    GstBuffer *inbuf;
    GstVideoInfo in_info;
    gint in_width = frames[i].width;
    gint in_height = frames[i].height;
    GstVideoFormat in_v_format = frames[i].v_format;
    gchar *in_data[GST_VIDEO_MAX_PLANES] = { 0 };
    GstGLMemory *in_mem[GST_VIDEO_MAX_PLANES] = { 0 };
    GstVideoFrame in_frame;
    GstCaps *in_caps;

    /* Describe the input as GL-memory caps */
    gst_video_info_set_format (&in_info, in_v_format, in_width, in_height);
    in_caps = gst_video_info_to_caps (&in_info);
    gst_caps_set_features (in_caps, 0,
        gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      in_data[j] = frames[i].data[j];
    }

    /* create GL buffer */
    /* one outstanding reference per wrapped plane; _frame_unref is
     * expected to decrement ref_count as each plane is released */
    ref_count += GST_VIDEO_INFO_N_PLANES (&in_info);
    inbuf = gst_buffer_new ();
    fail_unless (gst_gl_memory_setup_wrapped (context,
            GST_GL_TEXTURE_TARGET_2D, &in_info, NULL, (gpointer *) in_data,
            in_mem, &ref_count, _frame_unref));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      gst_buffer_append_memory (inbuf, (GstMemory *) in_mem[j]);
    }

    fail_unless (gst_video_frame_map (&in_frame, &in_info, inbuf,
            GST_MAP_READ));

    /* sanity check that the correct values were wrapped */
    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      for (k = 0; k < _video_info_plane_size (&in_info, j); k++) {
        if (in_data[j][k] != IGNORE_MAGIC)
          fail_unless (((gchar *) in_frame.data[j])[k] == in_data[j][k]);
      }
    }

    /* Convert this input to every frame format in the set (including its
     * own) and verify the result */
    for (j = 0; j < size; j++) {
      GstBuffer *outbuf;
      GstVideoInfo out_info;
      gint out_width = frames[j].width;
      gint out_height = frames[j].height;
      GstVideoFormat out_v_format = frames[j].v_format;
      gchar *out_data[GST_VIDEO_MAX_PLANES] = { 0 };
      GstVideoFrame out_frame;
      GstCaps *out_caps;

      gst_video_info_set_format (&out_info, out_v_format, out_width,
          out_height);
      out_caps = gst_video_info_to_caps (&out_info);
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        out_data[k] = frames[j].data[k];
      }

      gst_gl_color_convert_set_caps (convert, in_caps, out_caps);

      /* convert the data */
      outbuf = gst_gl_color_convert_perform (convert, inbuf);
      if (outbuf == NULL) {
        const gchar *in_str = gst_video_format_to_string (in_v_format);
        const gchar *out_str = gst_video_format_to_string (out_v_format);
        GST_WARNING ("failed to convert from %s to %s", in_str, out_str);
      }

      /* NOTE(review): when outbuf is NULL the code still attempts the map
       * below — presumably gst_video_frame_map() rejects a NULL buffer and
       * fail_unless() fails the test there; confirm it doesn't crash first */
      fail_unless (gst_video_frame_map (&out_frame, &out_info, outbuf,
              GST_MAP_READ));

      /* check that the converted values are correct */
      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        for (l = 0; l < _video_info_plane_size (&out_info, k); l++) {
          gchar out_pixel = ((gchar *) out_frame.data[k])[l];
          if (out_data[k][l] != IGNORE_MAGIC && out_pixel != IGNORE_MAGIC)
            fail_unless (out_pixel == out_data[k][l]);
          /* FIXME: check alpha clobbering */
        }
      }

      gst_caps_unref (out_caps);
      gst_video_frame_unmap (&out_frame);
      gst_buffer_unref (outbuf);
    }

    gst_caps_unref (in_caps);
    gst_video_frame_unmap (&in_frame);
    gst_buffer_unref (inbuf);

    /* all wrapped planes must have been released exactly once */
    fail_unless_equals_int (ref_count, 0);
  }
}