/* Compare two raw video buffers using SSIM (structural similarity).
 *
 * Returns the weighted sum of per-component SSIM scores, a value of
 * comp->threshold + 1 when the two buffers' caps disagree in format or
 * size (so they compare as "different"), or 0 on invalid or unsupported
 * input. */
static gdouble
gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
  GstCaps *caps;
  GstVideoFormat format, f;
  gint width, height, w, h, i, comps;
  /* zero-initialize all 4 entries: only the first 'comps' are computed in
   * the loop below, but all 4 are read in the weighted sum and by the
   * debug output, so leaving them uninitialized was undefined behavior */
  gdouble cssim[4] = { 0.0, 0.0, 0.0, 0.0 };
  gdouble ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 };
  guint8 *data1, *data2;

  caps = GST_BUFFER_CAPS (buf1);
  if (!caps)
    goto invalid_input;
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto invalid_input;

  caps = GST_BUFFER_CAPS (buf2);
  if (!caps)
    goto invalid_input;
  if (!gst_video_format_parse_caps (caps, &f, &w, &h))
    goto invalid_input;

  /* mismatched format or size can never match; exceed the threshold */
  if (f != format || w != width || h != height)
    return comp->threshold + 1;

  comps = gst_video_format_is_gray (format) ? 1 : 3;
  if (gst_video_format_has_alpha (format))
    comps += 1;

  /* note that some are reported both yuv and gray */
  for (i = 0; i < comps; ++i)
    c[i] = 1.0;
  /* increase luma weight if yuv */
  if (gst_video_format_is_yuv (format) && (comps > 1))
    c[0] = comps - 1;
  /* normalize weights so they sum to 1 */
  for (i = 0; i < comps; ++i)
    c[i] /= (gst_video_format_is_yuv (format) && (comps > 1)) ?
        2 * (comps - 1) : comps;

  data1 = GST_BUFFER_DATA (buf1);
  data2 = GST_BUFFER_DATA (buf2);
  for (i = 0; i < comps; i++) {
    gint offset, cw, ch, step, stride;

    /* only support most common formats */
    if (gst_video_format_get_component_depth (format, i) != 8)
      goto unsupported_input;
    offset = gst_video_format_get_component_offset (format, i, width, height);
    cw = gst_video_format_get_component_width (format, i, width);
    ch = gst_video_format_get_component_height (format, i, height);
    step = gst_video_format_get_pixel_stride (format, i);
    stride = gst_video_format_get_row_stride (format, i, width);

    GST_LOG_OBJECT (comp, "component %d", i);
    cssim[i] = gst_compare_ssim_component (comp, data1 + offset,
        data2 + offset, cw, ch, step, stride);
    GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]);
  }

#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < 4; i++) {
    GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]);
  }
#endif

  /* unused components contribute 0 * 0 thanks to the initializers above */
  ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] +
      cssim[3] * c[3];

  return ssim;

  /* ERRORS */
invalid_input:
  {
    GST_ERROR_OBJECT (comp, "ssim method needs raw video input");
    return 0;
  }
unsupported_input:
  {
    GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT,
        caps);
    return 0;
  }
}
/* Inspect a freshly decoded JasPer image, validate that we can handle it
 * (exactly 3 components, 8-bit unsigned samples), and (re)negotiate the
 * source pad caps if the image geometry, component layout or colour space
 * changed since the last frame.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED when no
 * acceptable downstream format could be found or caps could not be set,
 * and GST_FLOW_ERROR for an unsupported colour space. */
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* jp2c bitstream has no real colour space info (kept in container),
   * so decoder may only pretend to know, where it really does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  /* reject degenerate images outright */
  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  /* any change in global image parameters forces renegotiation */
  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  if (channels != 3)
    goto not_supported;

  /* validate each component and detect per-component geometry changes */
  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    /* only 8-bit unsigned samples are supported */
    if (depth != 8 || sgnd)
      goto not_supported;

    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  /* nothing changed and a format is already in place: done */
  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref, and
     * get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  /* avoid lists of fourcc, etc */
  allowed_caps = gst_caps_normalize (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  /* try each candidate caps structure in downstream preference order */
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    /* drop the previous iteration's candidate, if any */
    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      /* map the decoder's R/G/B components to jasper component indices;
       * all three must be present */
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      /* likewise for Y/Cb/Cr */
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;
    /* match format with validity checks */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      /* per-component dimensions of the decoded image must agree with
       * what the candidate video format expects (subsampling etc.) */
      if (dec->cwidth[cmpt] !=
          gst_video_format_get_component_width (format, j, width) ||
          dec->cheight[cmpt] !=
          gst_video_format_get_component_height (format, j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] =
          gst_video_format_get_component_offset (dec->format, j, dec->width,
          dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] =
          gst_video_format_get_row_stride (dec->format, j, dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);

    /* scratch row buffer; one glong per output pixel column */
    if (dec->buf)
      g_free (dec->buf);
    dec->buf = g_new0 (glong, dec->width);

    caps =
        gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);

    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);

    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;

    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
        ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
/**
 * gst_video_format_new_caps:
 * @format: the #GstVideoFormat describing the raw video format
 * @width: width of video
 * @height: height of video
 * @framerate_n: numerator of frame rate
 * @framerate_d: denominator of frame rate
 * @par_n: numerator of pixel aspect ratio
 * @par_d: denominator of pixel aspect ratio
 *
 * Creates a new #GstCaps object based on the parameters provided.
 *
 * Since: 0.10.16
 *
 * Returns: a new #GstCaps object, or NULL if there was an error
 */
GstCaps *
gst_video_format_new_caps (GstVideoFormat format, int width, int height,
    int framerate_n, int framerate_d, int par_n, int par_d)
{
  g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);
  g_return_val_if_fail (width > 0 && height > 0, NULL);

  /* YUV formats are fully described by their fourcc */
  if (gst_video_format_is_yuv (format)) {
    return gst_caps_new_simple ("video/x-raw-yuv",
        "format", GST_TYPE_FOURCC, gst_video_format_to_fourcc (format),
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
  }

  if (gst_video_format_is_rgb (format)) {
    GstCaps *rgb_caps;
    gboolean with_alpha;
    int bits_per_pixel;
    int color_depth;
    int r_mask, g_mask, b_mask, a_mask;
    unsigned int top_byte_mask;

    switch (format) {
      case GST_VIDEO_FORMAT_RGBx:
      case GST_VIDEO_FORMAT_BGRx:
      case GST_VIDEO_FORMAT_xRGB:
      case GST_VIDEO_FORMAT_xBGR:
        /* 32-bit pixel with one unused padding byte */
        bits_per_pixel = 32;
        color_depth = 24;
        with_alpha = FALSE;
        break;
      case GST_VIDEO_FORMAT_RGBA:
      case GST_VIDEO_FORMAT_BGRA:
      case GST_VIDEO_FORMAT_ARGB:
      case GST_VIDEO_FORMAT_ABGR:
        bits_per_pixel = 32;
        color_depth = 32;
        with_alpha = TRUE;
        break;
      case GST_VIDEO_FORMAT_RGB:
      case GST_VIDEO_FORMAT_BGR:
        bits_per_pixel = 24;
        color_depth = 24;
        with_alpha = FALSE;
        break;
      default:
        /* not a packed RGB layout that can be expressed as masks */
        return NULL;
    }

    /* mask covering the most significant byte of a big-endian pixel;
     * shifting it right by a component's byte offset yields that
     * component's channel mask */
    top_byte_mask = (bits_per_pixel == 32) ? 0xff000000 : 0xff0000;

    r_mask = top_byte_mask >>
        (8 * gst_video_format_get_component_offset (format, 0, width,
            height));
    g_mask = top_byte_mask >>
        (8 * gst_video_format_get_component_offset (format, 1, width,
            height));
    b_mask = top_byte_mask >>
        (8 * gst_video_format_get_component_offset (format, 2, width,
            height));

    rgb_caps = gst_caps_new_simple ("video/x-raw-rgb",
        "bpp", G_TYPE_INT, bits_per_pixel,
        "depth", G_TYPE_INT, color_depth,
        "endianness", G_TYPE_INT, G_BIG_ENDIAN,
        "red_mask", G_TYPE_INT, r_mask,
        "green_mask", G_TYPE_INT, g_mask,
        "blue_mask", G_TYPE_INT, b_mask,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);

    if (with_alpha) {
      a_mask = top_byte_mask >>
          (8 * gst_video_format_get_component_offset (format, 3, width,
              height));
      gst_caps_set_simple (rgb_caps, "alpha_mask", G_TYPE_INT, a_mask, NULL);
    }
    return rgb_caps;
  }

  /* neither YUV nor packed RGB: no caps mapping available here */
  return NULL;
}