/* Rate the loss incurred by converting @in_info to the format named in
 * @val.  When this candidate scores better than the best seen so far,
 * *min_loss and *out_info are updated; an exact format match scores 0
 * and can never be beaten. */
static void
score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
    const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
{
  const GstVideoFormatInfo *cand;
  GstVideoFormatFlags src_flags, cand_flags;
  const gchar *name;
  gint score;

  name = g_value_get_string (val);
  cand = gst_video_format_get_info (gst_video_format_from_string (name));
  if (cand == NULL)
    return;

  /* identical format: perfect score, take it immediately */
  if (cand == in_info) {
    *min_loss = 0;
    *out_info = cand;
    return;
  }

  /* any conversion costs at least 1 */
  score = 1;

  /* endianness, complexity and unpack hints do not affect visual loss,
   * so mask them out before comparing flags */
  src_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info)
      & ~(GST_VIDEO_FORMAT_FLAG_LE | GST_VIDEO_FORMAT_FLAG_COMPLEX
      | GST_VIDEO_FORMAT_FLAG_UNPACK);
  cand_flags = GST_VIDEO_FORMAT_INFO_FLAGS (cand)
      & ~(GST_VIDEO_FORMAT_FLAG_LE | GST_VIDEO_FORMAT_FLAG_COMPLEX
      | GST_VIDEO_FORMAT_FLAG_UNPACK);

  if ((cand_flags & PALETTE_MASK) != (src_flags & PALETTE_MASK))
    score += SCORE_PALETTE_LOSS;
  if ((cand_flags & COLOR_MASK) != (src_flags & COLOR_MASK))
    score += SCORE_COLOR_LOSS;
  if ((cand_flags & ALPHA_MASK) != (src_flags & ALPHA_MASK))
    score += SCORE_ALPHA_LOSS;

  /* larger subsampling factor means less chroma resolution */
  if (in_info->h_sub[1] < cand->h_sub[1])
    score += SCORE_CHROMA_H_LOSS;
  if (in_info->w_sub[1] < cand->w_sub[1])
    score += SCORE_CHROMA_W_LOSS;

  /* fewer bits in the target means depth is lost */
  if (in_info->bits > cand->bits)
    score += SCORE_DEPTH_LOSS;

  GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
      GST_VIDEO_FORMAT_INFO_NAME (in_info),
      GST_VIDEO_FORMAT_INFO_NAME (cand), score);

  if (score < *min_loss) {
    GST_DEBUG_OBJECT (base, "found new best %d", score);
    *out_info = cand;
    *min_loss = score;
  }
}
/* Create an EglConfig whose RGBA component sizes match @format and whose
 * renderable type matches the requested GLES version.
 *
 * Returns NULL when @format is not an RGB format or @gles_version is not
 * known; otherwise a new config (caller owns the reference). */
EglConfig *
egl_config_new (EglDisplay * display, guint gles_version,
    GstVideoFormat format)
{
  /* 6 attribute/value pairs plus the EGL_NONE terminator */
  EGLint attribs[2 * 6 + 1];
  EGLint *p = attribs;
  const GstVideoFormatInfo *finfo;
  const GlVersionInfo *vinfo;

  g_return_val_if_fail (display != NULL, NULL);

  finfo = gst_video_format_get_info (format);
  if (finfo == NULL || !GST_VIDEO_FORMAT_INFO_IS_RGB (finfo))
    return NULL;

  vinfo = gl_version_info_lookup (gles_version);
  if (vinfo == NULL)
    return NULL;

  *p++ = EGL_COLOR_BUFFER_TYPE;
  *p++ = EGL_RGB_BUFFER;
  *p++ = EGL_RED_SIZE;
  *p++ = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, GST_VIDEO_COMP_R);
  *p++ = EGL_GREEN_SIZE;
  *p++ = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, GST_VIDEO_COMP_G);
  *p++ = EGL_BLUE_SIZE;
  *p++ = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, GST_VIDEO_COMP_B);
  *p++ = EGL_ALPHA_SIZE;
  *p++ = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, GST_VIDEO_COMP_A);
  *p++ = EGL_RENDERABLE_TYPE;
  *p++ = vinfo->gl_api_bit;
  *p++ = EGL_NONE;

  /* sanity: we must not have written past the attribute array */
  g_assert (p - attribs <= G_N_ELEMENTS (attribs));

  return egl_config_new_with_attribs (display, attribs);
}
/* Fixate the "format" field of @result to the candidate format that loses
 * the least information relative to the fixed input format in @caps.
 * Scoring is delegated to score_value(); @result is only modified when at
 * least one candidate format could be scored. */
static void
gst_video_convert_fixate_format (GstBaseTransform * base, GstCaps * caps,
    GstCaps * result)
{
  GstStructure *ins, *outs;
  const gchar *in_format;
  const GstVideoFormatInfo *in_info, *out_info = NULL;
  gint min_loss = G_MAXINT;
  guint i, capslen;

  ins = gst_caps_get_structure (caps, 0);
  in_format = gst_structure_get_string (ins, "format");
  if (!in_format)
    return;

  GST_DEBUG_OBJECT (base, "source format %s", in_format);

  in_info =
      gst_video_format_get_info (gst_video_format_from_string (in_format));
  if (!in_info)
    return;

  outs = gst_caps_get_structure (result, 0);

  capslen = gst_caps_get_size (result);
  GST_DEBUG_OBJECT (base, "iterate %d structures", capslen);
  for (i = 0; i < capslen; i++) {
    GstStructure *tests;
    const GValue *format;

    tests = gst_caps_get_structure (result, i);
    format = gst_structure_get_value (tests, "format");
    /* should not happen */
    if (format == NULL)
      continue;

    if (GST_VALUE_HOLDS_LIST (format)) {
      gint j, len;

      len = gst_value_list_get_size (format);
      GST_DEBUG_OBJECT (base, "have %d formats", len);
      for (j = 0; j < len; j++) {
        const GValue *val;

        val = gst_value_list_get_value (format, j);
        if (G_VALUE_HOLDS_STRING (val)) {
          score_value (base, in_info, val, &min_loss, &out_info);
          if (min_loss == 0)
            break;
        }
      }
    } else if (G_VALUE_HOLDS_STRING (format)) {
      score_value (base, in_info, format, &min_loss, &out_info);
    }
    /* FIX: a lossless match (min_loss == 0) can never be improved upon.
     * Previously only the inner list loop stopped early while the outer
     * loop kept scanning the remaining structures for nothing. */
    if (min_loss == 0)
      break;
  }
  if (out_info)
    gst_structure_set (outs, "format", G_TYPE_STRING,
        GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
}
/**
 * gst_video_info_init:
 * @info: a #GstVideoInfo
 *
 * Initialize @info with default values.
 */
void
gst_video_info_init (GstVideoInfo * info)
{
  g_return_if_fail (info != NULL);

  /* clear everything first, then fill in the few non-zero defaults */
  memset (info, 0, sizeof (*info));

  info->finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_UNKNOWN);

  /* one view, variable (0/1) framerate and square pixels are sensible
   * defaults, e.g. if this info is later turned into caps */
  info->views = 1;
  info->fps_n = 0;
  info->fps_d = 1;
  info->par_n = 1;
  info->par_d = 1;
  GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE;
}
/* Derive the OpenCV IPL depth constant and channel count from a
 * video/x-raw caps structure.  RGB formats map to 3 channels, gray
 * formats to 1; only 8- and 16-bit-per-channel layouts are supported.
 * Returns FALSE (setting @err for negotiation errors) otherwise. */
static gboolean
gst_opencv_get_ipl_depth_and_channels (GstStructure * structure,
    gint * ipldepth, gint * channels, GError ** err)
{
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
  const GstVideoFormatInfo *info;
  gint depth = 0, i;
  const gchar *s;

  if (gst_structure_has_name (structure, "video/x-raw")) {
    s = gst_structure_get_string (structure, "format");
    if (s == NULL)
      return FALSE;
    format = gst_video_format_from_string (s);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
      return FALSE;
  }

  info = gst_video_format_get_info (format);

  if (GST_VIDEO_FORMAT_INFO_IS_RGB (info)) {
    *channels = 3;
  } else if (GST_VIDEO_FORMAT_INFO_IS_GRAY (info)) {
    *channels = 1;
  } else {
    g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
        "Unsupported structure %s", gst_structure_get_name (structure));
    return FALSE;
  }

  /* sum the bit depth over all components */
  for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (info); i++)
    depth += GST_VIDEO_FORMAT_INFO_DEPTH (info, i);

  switch (depth / *channels) {
    case 8:
      /* TODO signdness? */
      *ipldepth = IPL_DEPTH_8U;
      break;
    case 16:
      *ipldepth = IPL_DEPTH_16U;
      break;
    default:
      g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION,
          "Unsupported depth/channels %d/%d", depth, *channels);
      return FALSE;
  }

  return TRUE;
}
/**
 * gst_video_info_set_format:
 * @info: a #GstVideoInfo
 * @format: the format
 * @width: a width
 * @height: a height
 *
 * Set the default info for a video frame of @format and @width and @height.
 *
 * Note: This initializes @info first, no values are preserved. This function
 * does not set the offsets correctly for interlaced vertically
 * subsampled formats.
 */
void
gst_video_info_set_format (GstVideoInfo * info, GstVideoFormat format,
    guint width, guint height)
{
  g_return_if_fail (info != NULL);
  g_return_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN);

  /* wipe previous contents; nothing survives re-initialization */
  gst_video_info_init (info);

  info->finfo = gst_video_format_get_info (format);
  info->width = width;
  info->height = height;
  info->views = 1;

  /* pick a default colorimetry for the format, then compute the
   * per-plane stride/offset layout */
  set_default_colorimetry (info);
  fill_planes (info);
}
/* Update @meta's dimensions and texture format from @buffer's GstVideoMeta,
 * falling back to DEFAULT_FORMAT and 0x0 when no buffer or video meta is
 * available.  Non-RGB frame formats also fall back to DEFAULT_FORMAT. */
static gboolean
meta_texture_ensure_info_from_buffer (GstVaapiVideoMetaTexture * meta,
    GstBuffer * buffer)
{
  GstVideoMeta *vmeta = buffer ? gst_buffer_get_video_meta (buffer) : NULL;
  GstVideoFormat format;

  if (vmeta == NULL) {
    /* no meta to read from: use defaults */
    format = DEFAULT_FORMAT;
    meta->width = 0;
    meta->height = 0;
  } else {
    const GstVideoFormatInfo *const finfo =
        gst_video_format_get_info (vmeta->format);

    /* only RGB formats are used directly; anything else maps to the
     * default texture format */
    if (finfo && GST_VIDEO_FORMAT_INFO_IS_RGB (finfo))
      format = vmeta->format;
    else
      format = DEFAULT_FORMAT;
    meta->width = vmeta->width;
    meta->height = vmeta->height;
  }
  return meta_texture_ensure_format (meta, format);
}
/**
 * gst_video_info_from_caps:
 * @info: a #GstVideoInfo
 * @caps: a #GstCaps
 *
 * Parse @caps and update @info.
 *
 * @caps must be fixed.  On success @info is fully re-initialized from the
 * caps fields (format, dimensions, framerate, PAR, interlacing, multiview,
 * chroma-site and colorimetry) and the plane layout is computed.  On
 * failure @info may have been partially re-initialized.
 *
 * Returns: TRUE if @caps could be parsed
 */
gboolean
gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
{
  GstStructure *structure;
  const gchar *s;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
  gint width = 0, height = 0;
  gint fps_n, fps_d;
  gint par_n, par_d;

  g_return_val_if_fail (info != NULL, FALSE);
  g_return_val_if_fail (caps != NULL, FALSE);
  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  GST_DEBUG ("parsing caps %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  /* raw caps carry an explicit "format" string; any other video/ or
   * image/ media type is treated as an encoded stream */
  if (gst_structure_has_name (structure, "video/x-raw")) {
    if (!(s = gst_structure_get_string (structure, "format")))
      goto no_format;

    format = gst_video_format_from_string (s);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
      goto unknown_format;

  } else if (g_str_has_prefix (gst_structure_get_name (structure), "video/")
      || g_str_has_prefix (gst_structure_get_name (structure), "image/")) {
    format = GST_VIDEO_FORMAT_ENCODED;
  } else {
    goto wrong_name;
  }

  /* width and height are mandatory, except for non-raw-formats */
  if (!gst_structure_get_int (structure, "width", &width) &&
      format != GST_VIDEO_FORMAT_ENCODED)
    goto no_width;
  if (!gst_structure_get_int (structure, "height", &height) &&
      format != GST_VIDEO_FORMAT_ENCODED)
    goto no_height;

  /* reset everything to defaults before filling in parsed values */
  gst_video_info_init (info);

  info->finfo = gst_video_format_get_info (format);
  info->width = width;
  info->height = height;

  if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
    if (fps_n == 0) {
      /* variable framerate */
      info->flags |= GST_VIDEO_FLAG_VARIABLE_FPS;
      /* see if we have a max-framerate */
      gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
    }
    info->fps_n = fps_n;
    info->fps_d = fps_d;
  } else {
    /* unspecified is variable framerate */
    info->fps_n = 0;
    info->fps_d = 1;
  }

  /* missing PAR means square pixels (1/1) */
  if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
          &par_n, &par_d)) {
    info->par_n = par_n;
    info->par_d = par_d;
  } else {
    info->par_n = 1;
    info->par_d = 1;
  }

  if ((s = gst_structure_get_string (structure, "interlace-mode")))
    info->interlace_mode = gst_video_interlace_mode_from_string (s);
  else
    info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  {
    if ((s = gst_structure_get_string (structure, "multiview-mode")))
      GST_VIDEO_INFO_MULTIVIEW_MODE (info) =
          gst_video_multiview_mode_from_caps_string (s);
    else
      GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE;

    gst_structure_get_flagset (structure, "multiview-flags",
        &GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), NULL);

    if (!gst_structure_get_int (structure, "views", &info->views))
      info->views = 1;

    /* At one point, I tried normalising the half-aspect flag here,
     * but it behaves weird for GstVideoInfo operations other than
     * directly converting to/from caps - sometimes causing the
     * PAR to be doubled/halved too many times */
  }

  if ((s = gst_structure_get_string (structure, "chroma-site")))
    info->chroma_site = gst_video_chroma_from_string (s);
  else
    info->chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN;

  /* colorimetry: fall back to a format-appropriate default when the
   * string is missing, unparsable or fails validation */
  if ((s = gst_structure_get_string (structure, "colorimetry"))) {
    if (!gst_video_colorimetry_from_string (&info->colorimetry, s)) {
      GST_WARNING ("unparsable colorimetry, using default");
      set_default_colorimetry (info);
    } else if (!validate_colorimetry (info)) {
      GST_WARNING ("invalid colorimetry, using default");
      set_default_colorimetry (info);
    } else {
      /* force RGB matrix for RGB formats */
      if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) &&
          info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
        GST_WARNING ("invalid matrix %d for RGB format, using RGB",
            info->colorimetry.matrix);
        info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
      }
    }
  } else {
    GST_DEBUG ("no colorimetry, using default");
    set_default_colorimetry (info);
  }

  /* compute stride/offset layout for the parsed format and size */
  fill_planes (info);

  return TRUE;

  /* ERROR */
wrong_name:
  {
    GST_ERROR ("wrong name '%s', expected video/ or image/",
        gst_structure_get_name (structure));
    return FALSE;
  }
no_format:
  {
    GST_ERROR ("no format given");
    return FALSE;
  }
unknown_format:
  {
    GST_ERROR ("unknown format '%s' given", s);
    return FALSE;
  }
no_width:
  {
    GST_ERROR ("no width property given");
    return FALSE;
  }
no_height:
  {
    GST_ERROR ("no height property given");
    return FALSE;
  }
}
/* Query the Phoenix frame grabber for its current destination format,
 * endianness and scaled ROI, and build the corresponding src caps.
 * Returns the template caps when no camera handle is open yet, or NULL
 * on a parameter-query / unsupported-format error. */
static GstCaps *
gst_phoenixsrc_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  etStat eStat = PHX_OK;        /* Status variable */
  /* NOTE(review): eParamValue is initialized but never used below */
  etParamValue eParamValue = PHX_INVALID_PARAMVALUE;
  ui32 dwParamValue = 0;
  guint32 phx_format;
  gint width, height;
  gint bpp, depth, endianness;
  GstVideoFormat videoFormat;
  gboolean is_gray16 = FALSE, is_bayer = FALSE;
  GstVideoInfo vinfo;
  GstCaps *caps;

  /* without an open camera we can only offer the pad template caps */
  if (!src->hCamera) {
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
  }

  /* Create video info */
  gst_video_info_init (&vinfo);

  /* Get format (mono, Bayer, RBG, etc.) */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_FORMAT, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  phx_format = dwParamValue;

  /* Get endianness */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_ENDIAN, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  endianness =
      (dwParamValue == PHX_DST_LITTLE_ENDIAN) ? G_LITTLE_ENDIAN : G_BIG_ENDIAN;

  /* get width */
  eStat = PHX_ParameterGet (src->hCamera, PHX_ROI_XLENGTH_SCALED,
      &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  width = dwParamValue;

  /* get height */
  eStat = PHX_ParameterGet (src->hCamera, PHX_ROI_YLENGTH_SCALED,
      &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  height = dwParamValue;

  /* Map the Phoenix destination format onto either a direct GstVideoFormat,
   * a >8-bit grayscale (is_gray16: format chosen later from endianness and
   * the true bit depth carried in "bpp"), or a Bayer layout (is_bayer:
   * caps built by hand below, "depth" = container size in bits). */
  switch (phx_format) {
    case PHX_DST_FORMAT_Y8:
      videoFormat = GST_VIDEO_FORMAT_GRAY8;
      break;
    case PHX_DST_FORMAT_Y10:
      bpp = 10;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y12:
      bpp = 12;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y14:
      bpp = 14;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y16:
      bpp = 16;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_BAY8:
      bpp = 8;
      depth = 8;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY10:
      bpp = 10;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY12:
      bpp = 12;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY14:
      bpp = 14;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY16:
      bpp = 16;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_RGB15:
      videoFormat = GST_VIDEO_FORMAT_RGB15;
      break;
    case PHX_DST_FORMAT_RGB16:
      videoFormat = GST_VIDEO_FORMAT_RGB16;
      break;
    case PHX_DST_FORMAT_RGB24:
      videoFormat = GST_VIDEO_FORMAT_RGB;
      break;
    case PHX_DST_FORMAT_RGB32:
      /* FIXME: what is the format of this? */
    case PHX_DST_FORMAT_XRGB32:
      videoFormat = GST_VIDEO_FORMAT_xRGB;
      break;
    default:
      videoFormat = GST_VIDEO_FORMAT_UNKNOWN;
  }

  /* >8-bit grayscale is always carried in 16-bit containers; pick LE/BE
   * to match the grabber's output endianness */
  if (is_gray16)
    videoFormat =
        (endianness ==
        G_LITTLE_ENDIAN) ? GST_VIDEO_FORMAT_GRAY16_LE :
        GST_VIDEO_FORMAT_GRAY16_BE;

  if (is_bayer) {
    const gchar *bay_fmt;

    /* the Bayer mosaic pattern comes from the camera source colour setting */
    eStat = PHX_ParameterGet (src->hCamera, PHX_CAM_SRC_COL, &dwParamValue);
    if (PHX_OK != eStat)
      goto ResourceSettingsError;
    switch (dwParamValue) {
      case PHX_CAM_SRC_BAY_RGGB:
        bay_fmt = (depth == 16) ? "rggb16" : "rggb";
        break;
      case PHX_CAM_SRC_BAY_GRBG:
        bay_fmt = (depth == 16) ? "grbg16" : "grbg";
        break;
      case PHX_CAM_SRC_BAY_GBRG:
        bay_fmt = (depth == 16) ? "gbrg16" : "gbrg";
        break;
      case PHX_CAM_SRC_BAY_BGGR:
        bay_fmt = (depth == 16) ? "bggr16" : "bggr";
        break;
      default:
        GST_ERROR_OBJECT (src, "Unknown PHX_CAM_SRC_COL=%d", dwParamValue);
        goto Error;
    }

    /* depth is always 8 or 16 here (set alongside is_bayer above) */
    if (depth == 8) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    } else if (depth == 16) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "bpp", G_TYPE_INT, bpp,
          "endianness", G_TYPE_INT, endianness,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }
  } else if (videoFormat != GST_VIDEO_FORMAT_UNKNOWN) {
    vinfo.finfo = gst_video_format_get_info (videoFormat);
    vinfo.width = width;
    vinfo.height = height;
    caps = gst_video_info_to_caps (&vinfo);

    /* advertise the true sample depth (10/12/14/16) on 16-bit gray caps */
    if (is_gray16) {
      GValue val = G_VALUE_INIT;
      GstStructure *s = gst_caps_get_structure (caps, 0);
      g_value_init (&val, G_TYPE_INT);
      g_value_set_int (&val, bpp);
      gst_structure_set_value (s, "bpp", &val);
      g_value_unset (&val);
    }
  } else {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        (("Unknown or unsupported color format.")), (NULL));
    goto Error;
  }

  /* get buffer size; width (in bytes) and height (in lines) */
  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_XLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  src->phx_stride = dwParamValue;

  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_YLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  /* TODO: should we be using PHX_BUF_DST_YLENGTH or PHX_ROI_YLENGTH_SCALED
     for height? */
  /* NOTE(review): this aborts the process (in debug builds) if the driver
   * reports a buffer height different from the scaled ROI height — confirm
   * whether a soft error path would be more appropriate */
  g_assert (dwParamValue == height);

  GST_DEBUG_OBJECT (src, "The caps before filtering are %" GST_PTR_FORMAT,
      caps);

  if (filter) {
    GstCaps *tmp = gst_caps_intersect (caps, filter);
    gst_caps_unref (caps);
    caps = tmp;
  }

  GST_DEBUG_OBJECT (src, "The caps after filtering are %" GST_PTR_FORMAT,
      caps);

  return caps;

ResourceSettingsError:
  GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS,
      (("Failed to get Phoenix parameters.")), (NULL));

Error:
  return NULL;
}
/* returns static descriptions and dynamic ones (such as video/x-raw), * or NULL if caps aren't known at all */ static gchar * format_info_get_desc (const FormatInfo * info, const GstCaps * caps) { const GstStructure *s; g_assert (info != NULL); if (info->desc != NULL) return g_strdup (_(info->desc)); s = gst_caps_get_structure (caps, 0); if (strcmp (info->type, "video/x-raw") == 0) { gchar *ret = NULL; const gchar *str = 0; GstVideoFormat format; const GstVideoFormatInfo *finfo; str = gst_structure_get_string (s, "format"); if (str == NULL) return g_strdup (_("Uncompressed video")); format = gst_video_format_from_string (str); if (format == GST_VIDEO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed video")); finfo = gst_video_format_get_info (format); if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) { ret = g_strdup (_("Uncompressed gray")); } else if (GST_VIDEO_FORMAT_INFO_IS_YUV (finfo)) { const gchar *subs; gint w_sub, h_sub, n_semi; w_sub = GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1); h_sub = GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 1); if (w_sub == 1 && h_sub == 1) { subs = "4:4:4"; } else if (w_sub == 2 && h_sub == 1) { subs = "4:2:2"; } else if (w_sub == 2 && h_sub == 2) { subs = "4:2:0"; } else if (w_sub == 4 && h_sub == 1) { subs = "4:1:1"; } else { subs = ""; } n_semi = GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo) ? 3 : 2; if (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) == 1) { ret = g_strdup_printf (_("Uncompressed packed YUV %s"), subs); } else if (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) == n_semi) { ret = g_strdup_printf (_("Uncompressed semi-planar YUV %s"), subs); } else { ret = g_strdup_printf (_("Uncompressed planar YUV %s"), subs); } } else if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) { gboolean alpha, palette; gint bits; alpha = GST_VIDEO_FORMAT_INFO_HAS_ALPHA (finfo); palette = GST_VIDEO_FORMAT_INFO_HAS_PALETTE (finfo); bits = GST_VIDEO_FORMAT_INFO_BITS (finfo); if (palette) { ret = g_strdup_printf (_("Uncompressed palettized %d-bit %s"), bits, alpha ? 
"RGBA" : "RGB"); } else { ret = g_strdup_printf (_("Uncompressed %d-bit %s"), bits, alpha ? "RGBA" : "RGB"); } } else { ret = g_strdup (_("Uncompressed video")); } return ret; } else if (strcmp (info->type, "video/x-h263") == 0) { const gchar *variant, *ret; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.263"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.26n"; /* why not ITU H.263? (tpm) */ else if (strcmp (variant, "lead") == 0) ret = "Lead H.263"; else if (strcmp (variant, "microsoft") == 0) ret = "Microsoft H.263"; else if (strcmp (variant, "vdolive") == 0) ret = "VDOLive"; else if (strcmp (variant, "vivo") == 0) ret = "Vivo H.263"; else if (strcmp (variant, "xirlink") == 0) ret = "Xirlink H.263"; else { GST_WARNING ("Unknown H263 variant '%s'", variant); ret = "H.263"; } return g_strdup (ret); } else if (strcmp (info->type, "video/x-h264") == 0) { const gchar *variant, *ret; const gchar *profile; variant = gst_structure_get_string (s, "variant"); if (variant == NULL) ret = "H.264"; else if (strcmp (variant, "itu") == 0) ret = "ITU H.264"; else if (strcmp (variant, "videosoft") == 0) ret = "Videosoft H.264"; else if (strcmp (variant, "lead") == 0) ret = "Lead H.264"; else { GST_WARNING ("Unknown H264 variant '%s'", variant); ret = "H.264"; } /* profile */ profile = gst_structure_get_string (s, "profile"); if (profile != NULL) profile = pbutils_desc_get_h264_profile_name_from_nick (profile); if (profile == NULL) return g_strdup (ret); return g_strdup_printf ("%s (%s Profile)", ret, profile); } else if (strcmp (info->type, "video/x-h265") == 0) { const gchar *profile = gst_structure_get_string (s, "profile"); if (profile != NULL) profile = pbutils_desc_get_h265_profile_name_from_nick (profile); if (profile != NULL) return g_strdup_printf ("H.265 (%s Profile)", profile); return g_strdup ("H.265"); } else if (strcmp (info->type, "video/x-dirac") == 0) { const gchar *profile = gst_structure_get_string (s, "profile"); if 
(profile == NULL) return g_strdup ("Dirac"); if (strcmp (profile, "vc2-low-delay") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Low Delay Profile"); else if (strcmp (profile, "vc2-simple") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Simple Profile"); else if (strcmp (profile, "vc2-main") == 0) return g_strdup_printf ("Dirac (%s)", "VC-2 Main Profile"); else return g_strdup ("Dirac"); } else if (strcmp (info->type, "video/x-divx") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "divxversion", &ver) || ver <= 2) { GST_WARNING ("Unexpected DivX version in %" GST_PTR_FORMAT, caps); return g_strdup ("DivX MPEG-4"); } return g_strdup_printf (_("DivX MPEG-4 Version %d"), ver); } else if (strcmp (info->type, "video/x-msmpeg") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "msmpegversion", &ver) || ver < 40 || ver > 49) { GST_WARNING ("Unexpected msmpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Microsoft MPEG-4 4.x"); } return g_strdup_printf ("Microsoft MPEG-4 4.%d", ver % 10); } else if (strcmp (info->type, "video/x-truemotion") == 0) { gint ver = 0; gst_structure_get_int (s, "trueversion", &ver); switch (ver) { case 1: return g_strdup_printf ("Duck TrueMotion 1"); case 2: return g_strdup_printf ("TrueMotion 2.0"); default: GST_WARNING ("Unexpected trueversion in %" GST_PTR_FORMAT, caps); break; } return g_strdup_printf ("TrueMotion"); } else if (strcmp (info->type, "video/x-xan") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "wcversion", &ver) || ver < 1) { GST_WARNING ("Unexpected wcversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Xan Wing Commander"); } return g_strdup_printf ("Xan Wing Commander %u", ver); } else if (strcmp (info->type, "video/x-indeo") == 0) { gint ver = 0; if (!gst_structure_get_int (s, "indeoversion", &ver) || ver < 2) { GST_WARNING ("Unexpected indeoversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Intel Indeo"); } return g_strdup_printf ("Intel Indeo %u", ver); } else if (strcmp (info->type, 
"audio/x-wma") == 0) { gint ver = 0; gst_structure_get_int (s, "wmaversion", &ver); switch (ver) { case 1: case 2: case 3: return g_strdup_printf ("Windows Media Audio %d", ver + 6); default: break; } GST_WARNING ("Unexpected wmaversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Audio"); } else if (strcmp (info->type, "video/x-wmv") == 0) { gint ver = 0; const gchar *str; gst_structure_get_int (s, "wmvversion", &ver); str = gst_structure_get_string (s, "format"); switch (ver) { case 1: case 2: case 3: if (str && strncmp (str, "MSS", 3)) { return g_strdup_printf ("Windows Media Video %d Screen", ver + 6); } else { return g_strdup_printf ("Windows Media Video %d", ver + 6); } default: break; } GST_WARNING ("Unexpected wmvversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Windows Media Video"); } else if (strcmp (info->type, "audio/x-mace") == 0) { gint ver = 0; gst_structure_get_int (s, "maceversion", &ver); if (ver == 3 || ver == 6) { return g_strdup_printf ("MACE-%d", ver); } else { GST_WARNING ("Unexpected maceversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MACE"); } } else if (strcmp (info->type, "video/x-svq") == 0) { gint ver = 0; gst_structure_get_int (s, "svqversion", &ver); if (ver == 1 || ver == 3) { return g_strdup_printf ("Sorensen Video %d", ver); } else { GST_WARNING ("Unexpected svqversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Sorensen Video"); } } else if (strcmp (info->type, "video/x-asus") == 0) { gint ver = 0; gst_structure_get_int (s, "asusversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("Asus Video %d", ver); } else { GST_WARNING ("Unexpected asusversion in %" GST_PTR_FORMAT, caps); return g_strdup ("Asus Video"); } } else if (strcmp (info->type, "video/x-ati-vcr") == 0) { gint ver = 0; gst_structure_get_int (s, "vcrversion", &ver); if (ver == 1 || ver == 2) { return g_strdup_printf ("ATI VCR %d", ver); } else { GST_WARNING ("Unexpected acrversion in %" GST_PTR_FORMAT, caps); return 
g_strdup ("ATI VCR"); } } else if (strcmp (info->type, "audio/x-adpcm") == 0) { const GValue *layout_val; layout_val = gst_structure_get_value (s, "layout"); if (layout_val != NULL && G_VALUE_HOLDS_STRING (layout_val)) { const gchar *layout; if ((layout = g_value_get_string (layout_val))) { gchar *layout_upper, *ret; if (strcmp (layout, "swf") == 0) return g_strdup ("Shockwave ADPCM"); if (strcmp (layout, "microsoft") == 0) return g_strdup ("Microsoft ADPCM"); if (strcmp (layout, "quicktime") == 0) return g_strdup ("Quicktime ADPCM"); if (strcmp (layout, "westwood") == 0) return g_strdup ("Westwood ADPCM"); if (strcmp (layout, "yamaha") == 0) return g_strdup ("Yamaha ADPCM"); /* FIXME: other layouts: sbpro2, sbpro3, sbpro4, ct, g726, ea, * adx, xa, 4xm, smjpeg, dk4, dk3, dvi */ layout_upper = g_ascii_strup (layout, -1); ret = g_strdup_printf ("%s ADPCM", layout_upper); g_free (layout_upper); return ret; } } return g_strdup ("ADPCM"); } else if (strcmp (info->type, "audio/mpeg") == 0) { gint ver = 0, layer = 0; gst_structure_get_int (s, "mpegversion", &ver); switch (ver) { case 1: gst_structure_get_int (s, "layer", &layer); switch (layer) { case 1: case 2: case 3: return g_strdup_printf ("MPEG-1 Layer %d (MP%d)", layer, layer); default: break; } GST_WARNING ("Unexpected MPEG-1 layer in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG-1 Audio"); case 2: return g_strdup ("MPEG-2 AAC"); case 4: return g_strdup ("MPEG-4 AAC"); default: break; } GST_WARNING ("Unexpected audio mpegversion in %" GST_PTR_FORMAT, caps); return g_strdup ("MPEG Audio"); } else if (strcmp (info->type, "audio/x-pn-realaudio") == 0) { gint ver = 0; gst_structure_get_int (s, "raversion", &ver); switch (ver) { case 1: return g_strdup ("RealAudio 14k4bps"); case 2: return g_strdup ("RealAudio 28k8bps"); case 8: return g_strdup ("RealAudio G2 (Cook)"); default: break; } GST_WARNING ("Unexpected raversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealAudio"); } else if (strcmp (info->type, 
"video/x-pn-realvideo") == 0) { gint ver = 0; gst_structure_get_int (s, "rmversion", &ver); switch (ver) { case 1: return g_strdup ("RealVideo 1.0"); case 2: return g_strdup ("RealVideo 2.0"); case 3: return g_strdup ("RealVideo 3.0"); case 4: return g_strdup ("RealVideo 4.0"); default: break; } GST_WARNING ("Unexpected rmversion in %" GST_PTR_FORMAT, caps); return g_strdup ("RealVideo"); } else if (strcmp (info->type, "video/mpeg") == 0) { gboolean sysstream; gint ver = 0; if (!gst_structure_get_boolean (s, "systemstream", &sysstream)) { GST_WARNING ("Missing systemstream field in mpeg video caps " "%" GST_PTR_FORMAT, caps); sysstream = FALSE; } if (gst_structure_get_int (s, "mpegversion", &ver) && ver > 0 && ver <= 4) { if (sysstream) { return g_strdup_printf ("MPEG-%d System Stream", ver); } else { const gchar *profile = gst_structure_get_string (s, "profile"); if (profile != NULL) { if (ver == 4) profile = pbutils_desc_get_mpeg4v_profile_name_from_nick (profile); else if (ver == 2) profile = pbutils_desc_get_mpeg2v_profile_name_from_nick (profile); else profile = NULL; } if (profile != NULL) return g_strdup_printf ("MPEG-%d Video (%s Profile)", ver, profile); else return g_strdup_printf ("MPEG-%d Video", ver); } } GST_WARNING ("Missing mpegversion field in mpeg video caps " "%" GST_PTR_FORMAT, caps); return g_strdup ("MPEG Video"); } else if (strcmp (info->type, "audio/x-raw") == 0) { gint depth = 0; gboolean is_float; const gchar *str; GstAudioFormat format; const GstAudioFormatInfo *finfo; str = gst_structure_get_string (s, "format"); format = gst_audio_format_from_string (str); if (format == GST_AUDIO_FORMAT_UNKNOWN) return g_strdup (_("Uncompressed audio")); finfo = gst_audio_format_get_info (format); depth = GST_AUDIO_FORMAT_INFO_DEPTH (finfo); is_float = GST_AUDIO_FORMAT_INFO_IS_FLOAT (finfo); return g_strdup_printf (_("Raw %d-bit %s audio"), depth, is_float ? 
"floating-point" : "PCM"); } else if (strcmp (info->type, "video/x-tscc") == 0) { gint version; gst_structure_get_int (s, "tsccversion", &version); switch (version) { case 1: return g_strdup ("TechSmith Screen Capture 1"); case 2: return g_strdup ("TechSmith Screen Capture 2"); default: break; } GST_WARNING ("Unexpected version in %" GST_PTR_FORMAT, caps); return g_strdup ("TechSmith Screen Capture"); } return NULL; }
/**
 * gst_niimaqsrc_get_cam_caps:
 * src: #GstNiImaq instance
 *
 * Get caps of camera attached to open IMAQ interface
 *
 * Returns: the #GstCaps of the src pad. Unref the caps when you no longer need it.
 */
GstCaps *
gst_niimaqsrc_get_cam_caps (GstNiImaqSrc * src)
{
  GstCaps *gcaps = NULL;
  Int32 rval;
  uInt32 val;
  gint depth, bpp;
  GstVideoInfo vinfo;

  if (!src->iid) {
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED, ("Camera interface not open"),
        ("Camera interface not open"));
    goto error;
  }

  gst_video_info_init (&vinfo);

  GST_LOG_OBJECT (src, "Retrieving attributes from IMAQ interface");

  rval = imgGetAttribute (src->iid, IMG_ATTR_BITSPERPIXEL, &val);
  gst_niimaqsrc_report_imaq_error (rval);
  bpp = val;
  /* FIX: accumulate statuses with |= so that any single failing call
   * leaves rval non-zero.  The previous &= let a later *successful* call
   * (status 0) clear an earlier error, silently masking failures. */
  rval |= imgGetAttribute (src->iid, IMG_ATTR_BYTESPERPIXEL, &val);
  gst_niimaqsrc_report_imaq_error (rval);
  depth = val * 8;
  rval |= imgGetAttribute (src->iid, IMG_ATTR_ROI_WIDTH, &val);
  gst_niimaqsrc_report_imaq_error (rval);
  vinfo.width = val;
  rval |= imgGetAttribute (src->iid, IMG_ATTR_ROI_HEIGHT, &val);
  gst_niimaqsrc_report_imaq_error (rval);
  vinfo.height = val;

  if (rval) {
    GST_ELEMENT_ERROR (src, STREAM, FAILED,
        ("attempt to read attributes failed"),
        ("attempt to read attributes failed"));
    goto error;
  }

  /* map the bytes-per-pixel depth onto a video format */
  if (depth == 8)
    vinfo.finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_GRAY8);
  else if (depth == 16)
    vinfo.finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_GRAY16_LE);
  else if (depth == 32)
    vinfo.finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_BGRA);
  else {
    GST_ERROR_OBJECT (src, "Depth %d (%d-bit) not supported yet", depth, bpp);
    goto error;
  }

  /* hard code framerate and par as IMAQ doesn't tell us anything about it */
  vinfo.fps_n = 30;
  vinfo.fps_d = 1;

  gcaps = gst_video_info_to_caps (&vinfo);

  GST_LOG_OBJECT (src, "the camera caps are %" GST_PTR_FORMAT, gcaps);

  return gcaps;

error:
  if (gcaps) {
    gst_caps_unref (gcaps);
  }
  return NULL;
}
/* Negotiate a buffer pool with downstream, ensuring 16-byte aligned buffers
 * and the padding alignment the decoder needs.  Falls back to an internal
 * generic pool (optionally keeping the downstream pool for a final copy)
 * when downstream cannot satisfy the alignment or stride requirements.
 *
 * Fix: restores "&params" where encoding corruption had turned it into
 * the mojibake sequence "¶ms" (HTML entity for "&para" + "ms"), which
 * did not compile. */
static gboolean
gst_mpeg2dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMpeg2dec *dec = GST_MPEG2DEC (decoder);
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config, *down_config = NULL;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean update_allocator;
  gboolean has_videometa = FALSE;
  GstCaps *caps;

  /* Get rid of ancient pool */
  if (dec->downstream_pool) {
    gst_buffer_pool_set_active (dec->downstream_pool, FALSE);
    gst_object_unref (dec->downstream_pool);
    dec->downstream_pool = NULL;
  }

  /* Get negotiated allocation caps */
  gst_query_parse_allocation (query, &caps, NULL);

  /* Set allocation parameters to guarantee 16-byte aligned output buffers */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  /* align is a mask: 15 means 16-byte alignment */
  params.align = MAX (params.align, 15);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);

  /* Now chain up to the parent class to guarantee that we can
   * get a buffer pool from the query */
  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query)) {
    if (allocator)
      gst_object_unref (allocator);
    return FALSE;
  }

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    has_videometa = TRUE;
  }

  if (dec->need_alignment) {
    /* If downstream does not support video meta, we will have to copy, keep
     * the downstream pool to avoid double copying */
    if (!has_videometa) {
      dec->downstream_pool = pool;
      pool = NULL;
      down_config = config;
      config = NULL;
      min = 2;
      max = 0;
    }
    /* In case downstream supports video meta, but the downstream pool does
     * not have alignment support, discard downstream pool and use video
     * pool */
    else if (!gst_buffer_pool_has_option (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
      gst_object_unref (pool);
      pool = NULL;
      gst_structure_free (config);
      config = NULL;
    }

    if (!pool)
      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
  }

  if (allocator)
    gst_object_unref (allocator);

  /* If we are copying out, we'll need to setup and activate the other pool */
  if (dec->downstream_pool) {
    if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config)) {
      /* the pool adjusted our config; re-read and validate the result */
      down_config = gst_buffer_pool_get_config (dec->downstream_pool);

      if (!gst_buffer_pool_config_validate_params (down_config, caps, size,
              min, max)) {
        gst_structure_free (down_config);
        goto config_failed;
      }

      if (!gst_buffer_pool_set_config (dec->downstream_pool, down_config))
        goto config_failed;
    }

    if (!gst_buffer_pool_set_active (dec->downstream_pool, TRUE))
      goto activate_failed;
  }

  /* Now configure the pool, if the pool had made some changes, it will
   * return FALSE. Validate the changes ... */
  if (!gst_buffer_pool_set_config (pool, config)) {
    config = gst_buffer_pool_get_config (pool);

    /* Check basic params */
    if (!gst_buffer_pool_config_validate_params (config, caps, size, min,
            max)) {
      gst_structure_free (config);
      goto config_failed;
    }

    /* If needed, check that resulting alignment is still valid */
    if (dec->need_alignment) {
      GstVideoAlignment valign;

      if (!gst_buffer_pool_config_get_video_alignment (config, &valign)) {
        gst_structure_free (config);
        goto config_failed;
      }

      if (valign.padding_left != 0 || valign.padding_top != 0
          || valign.padding_right < dec->valign.padding_right
          || valign.padding_bottom < dec->valign.padding_bottom) {
        gst_structure_free (config);
        goto config_failed;
      }
    }

    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* For external pools, we need to check strides */
  if (!GST_IS_VIDEO_BUFFER_POOL (pool) && has_videometa) {
    GstBuffer *buffer;
    const GstVideoFormatInfo *finfo;
    GstVideoMeta *vmeta;
    gint uv_stride;

    if (!gst_buffer_pool_set_active (pool, TRUE))
      goto activate_failed;

    if (gst_buffer_pool_acquire_buffer (pool, &buffer, NULL) != GST_FLOW_OK) {
      gst_buffer_pool_set_active (pool, FALSE);
      goto acquire_failed;
    }

    vmeta = gst_buffer_get_video_meta (buffer);
    finfo = gst_video_format_get_info (vmeta->format);

    /* Check that strides are compatible. In this case, we can scale the
     * stride directly since all the pixel strides for the formats we support
     * is 1 */
    uv_stride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, 1, vmeta->stride[0]);
    if (uv_stride != vmeta->stride[1] || uv_stride != vmeta->stride[2]) {
      gst_buffer_pool_set_active (pool, FALSE);
      gst_object_unref (pool);

      pool = gst_mpeg2dec_create_generic_pool (allocator, &params, caps, size,
          min, max, &config);

      if (dec->need_alignment) {
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
        gst_buffer_pool_config_set_video_alignment (config, &dec->valign);
      }

      /* Generic pool don't fail on _set_config() */
      gst_buffer_pool_set_config (pool, config);
    }

    gst_buffer_unref (buffer);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  gst_object_unref (pool);

  return TRUE;

config_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to configure buffer pool"),
      ("Configuration is most likely invalid, please report this issue."));
  return FALSE;

activate_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to activate buffer pool"), (NULL));
  return FALSE;

acquire_failed:
  gst_object_unref (pool);
  GST_ELEMENT_ERROR (dec, RESOURCE, SETTINGS,
      ("Failed to acquire a buffer"), (NULL));
  return FALSE;
}
/**
 * gst_video_frame_map_id:
 * @frame: pointer to #GstVideoFrame
 * @info: a #GstVideoInfo
 * @buffer: the buffer to map
 * @id: the frame id to map
 * @flags: #GstMapFlags
 *
 * Use @info and @buffer to fill in the values of @frame with the video frame
 * information of frame @id.
 *
 * When @id is -1, the default frame is mapped. When @id != -1, this function
 * will return %FALSE when there is no GstVideoMeta with that id.
 *
 * All video planes of @buffer will be mapped and the pointers will be set in
 * @frame->data.
 *
 * Returns: %TRUE on success.
 */
gboolean
gst_video_frame_map_id (GstVideoFrame * frame, GstVideoInfo * info,
    GstBuffer * buffer, gint id, GstMapFlags flags)
{
  GstVideoMeta *meta;
  gint i;

  g_return_val_if_fail (frame != NULL, FALSE);
  g_return_val_if_fail (info != NULL, FALSE);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);

  /* id == -1 means "the default frame"; otherwise look up the meta with
   * exactly that id */
  if (id == -1)
    meta = gst_buffer_get_video_meta (buffer);
  else
    meta = gst_buffer_get_video_meta_id (buffer, id);

  /* copy the info */
  frame->info = *info;

  if (meta) {
    /* meta overrides format and dimensions from @info; stride is filled in
     * per-plane by gst_video_meta_map() below */
    frame->info.finfo = gst_video_format_get_info (meta->format);
    frame->info.width = meta->width;
    frame->info.height = meta->height;
    frame->id = meta->id;
    frame->flags = meta->flags;

    /* map each plane individually; on failure, unwind only the planes that
     * were successfully mapped (see frame_map_failed below) */
    for (i = 0; i < info->finfo->n_planes; i++)
      if (!gst_video_meta_map (meta, i, &frame->map[i], &frame->data[i],
              &frame->info.stride[i], flags))
        goto frame_map_failed;
  } else {
    /* no metadata, we really need to have the metadata when the id is
     * specified. */
    if (id != -1)
      goto no_metadata;

    frame->id = id;
    frame->flags = 0;

    /* without meta, map the whole buffer once and derive the plane pointers
     * from the offsets in @info */
    if (!gst_buffer_map (buffer, &frame->map[0], flags))
      goto map_failed;

    /* do some sanity checks */
    if (frame->map[0].size < info->size)
      goto invalid_size;

    /* set up pointers */
    for (i = 0; i < info->finfo->n_planes; i++) {
      frame->data[i] = frame->map[0].data + info->offset[i];
    }
  }
  /* keep a ref on the buffer while the frame is mapped */
  frame->buffer = gst_buffer_ref (buffer);
  frame->meta = meta;

  /* buffer flags enhance the frame flags */
  if (GST_VIDEO_INFO_IS_INTERLACED (info)) {
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED))
      frame->flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF))
      frame->flags |= GST_VIDEO_FRAME_FLAG_TFF;
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_RFF))
      frame->flags |= GST_VIDEO_FRAME_FLAG_RFF;
    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD))
      frame->flags |= GST_VIDEO_FRAME_FLAG_ONEFIELD;
  }
  return TRUE;

  /* ERRORS */
no_metadata:
  {
    GST_ERROR ("no GstVideoMeta for id %d", id);
    return FALSE;
  }
frame_map_failed:
  {
    GST_ERROR ("failed to map video frame plane %d", i);
    /* unmap the planes mapped before the failing one */
    while (--i >= 0)
      gst_video_meta_unmap (meta, i, &frame->map[i]);
    return FALSE;
  }
map_failed:
  {
    GST_ERROR ("failed to map buffer");
    return FALSE;
  }
invalid_size:
  {
    GST_ERROR ("invalid buffer size %" G_GSIZE_FORMAT " < %" G_GSIZE_FORMAT,
        frame->map[0].size, info->size);
    gst_buffer_unmap (buffer, &frame->map[0]);
    return FALSE;
  }
}
/* Configure the source from negotiated caps.  Handles raw video via the
 * standard parsing helper and bayer data via manual parsing (stored
 * internally with a GRAY8 format info and 4-byte rounded stride). */
static gboolean
gst_video_test_src_setcaps (GstBaseSrc * bsrc, GstCaps * caps)
{
  const GstStructure *structure;
  GstVideoTestSrc *videotestsrc;
  GstVideoInfo info;

  videotestsrc = GST_VIDEO_TEST_SRC (bsrc);

  structure = gst_caps_get_structure (caps, 0);

  if (gst_structure_has_name (structure, "video/x-raw")) {
    /* we can use the parsing code */
    if (!gst_video_info_from_caps (&info, caps))
      goto parse_failed;

  } else if (gst_structure_has_name (structure, "video/x-bayer")) {
    gint x_inv = 0, y_inv = 0;

    gst_video_info_init (&info);

    info.finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_GRAY8);

    if (!gst_video_test_src_parse_caps (caps, &info.width, &info.height,
            &info.fps_n, &info.fps_d, &info.colorimetry, &x_inv, &y_inv))
      goto parse_failed;

    info.size = GST_ROUND_UP_4 (info.width) * info.height;
    info.stride[0] = GST_ROUND_UP_4 (info.width);

    videotestsrc->bayer = TRUE;
    videotestsrc->x_invert = x_inv;
    videotestsrc->y_invert = y_inv;
  } else {
    /* Fix: an unrecognized media type previously fell through both branches
     * and copied the *uninitialized* GstVideoInfo into the element state
     * (undefined behavior); reject such caps explicitly instead. */
    goto unsupported_caps;
  }

  /* looks ok here */
  videotestsrc->info = info;

  GST_DEBUG_OBJECT (videotestsrc, "size %dx%d, %d/%d fps",
      info.width, info.height, info.fps_n, info.fps_d);

  /* re-allocate the scratch line buffers for the new width */
  g_free (videotestsrc->tmpline);
  g_free (videotestsrc->tmpline2);
  g_free (videotestsrc->tmpline_u8);
  g_free (videotestsrc->tmpline_u16);
  videotestsrc->tmpline_u8 = g_malloc (info.width + 8);
  videotestsrc->tmpline = g_malloc ((info.width + 8) * 4);
  videotestsrc->tmpline2 = g_malloc ((info.width + 8) * 4);
  videotestsrc->tmpline_u16 = g_malloc ((info.width + 16) * 8);

  /* fold the elapsed time/frames into the accumulators so timestamps
   * continue smoothly across a renegotiation */
  videotestsrc->accum_rtime += videotestsrc->running_time;
  videotestsrc->accum_frames += videotestsrc->n_frames;

  videotestsrc->running_time = 0;
  videotestsrc->n_frames = 0;

  return TRUE;

  /* ERRORS */
parse_failed:
  {
    GST_DEBUG_OBJECT (bsrc, "failed to parse caps");
    return FALSE;
  }
unsupported_caps:
  {
    GST_DEBUG_OBJECT (bsrc, "unsupported caps: %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
}