/* Negotiate input caps on the sink pad: parse the raw video format, cache
 * the per-component image layout used by the encoder, then configure the
 * src caps and (re)initialize the JasPer encoder.
 *
 * Returns TRUE if the caps were accepted, FALSE otherwise. */
static gboolean
gst_jasper_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJasperEnc *enc;
  GstVideoFormat format;
  gint width, height;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint i;

  /* GST_PAD_PARENT does not take a reference, so no unref is owed on any
   * exit path of this function */
  enc = GST_JASPER_ENC (GST_PAD_PARENT (pad));

  /* get info from caps */
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto refuse_caps;

  /* optional; pass along if present */
  fps_num = fps_den = -1;
  par_num = par_den = -1;
  gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

  /* nothing changed since last negotiation; keep current setup */
  if (width == enc->width && height == enc->height && enc->format == format
      && fps_num == enc->fps_num && fps_den == enc->fps_den
      && par_num == enc->par_num && par_den == enc->par_den)
    return TRUE;

  /* store input description */
  enc->format = format;
  enc->width = width;
  enc->height = height;
  enc->fps_num = fps_num;
  enc->fps_den = fps_den;
  enc->par_num = par_num;
  enc->par_den = par_den;

  /* prepare a cached image description; alpha adds a fourth component */
  enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
  for (i = 0; i < enc->channels; ++i) {
    enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
    enc->cheight[i] = gst_video_format_get_component_height (format, i,
        height);
    enc->offset[i] = gst_video_format_get_component_offset (format, i,
        width, height);
    enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
    enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
  }

  if (!gst_jasper_enc_set_src_caps (enc))
    goto setcaps_failed;
  if (!gst_jasper_enc_init_encoder (enc))
    goto setup_failed;

  return TRUE;

  /* ERRORS */
setup_failed:
  {
    GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
    return FALSE;
  }
setcaps_failed:
  {
    GST_WARNING_OBJECT (enc, "Setting src caps failed");
    GST_ELEMENT_ERROR (enc, LIBRARY, SETTINGS, (NULL), (NULL));
    return FALSE;
  }
refuse_caps:
  {
    GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
    /* FIX: removed gst_object_unref (enc) here — GST_PAD_PARENT above does
     * not take a reference, so the unref dropped a ref this function never
     * owned (reference-count underflow on the element) */
    return FALSE;
  }
}
/* Negotiate src caps for a freshly decoded JasPer image.
 *
 * Reads the image geometry and colour space from @image, and if anything
 * changed since the last negotiated state (or nothing was negotiated yet),
 * walks the peer's allowed caps to find the first raw video format whose
 * colour family and per-component geometry match the decoded image, caches
 * the component layout, and sets the resulting caps on the src pad.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED if no usable
 * format was found or the image is malformed, GST_FLOW_ERROR for an
 * unsupported colour space. */
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* jp2c bitstream has no real colour space info (kept in container),
   * so decoder may only pretend to know, where it really does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  /* reject degenerate images outright */
  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  /* any change in frame-level parameters forces renegotiation */
  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  /* only 3-component (RGB / YCbCr) images are handled below */
  if (channels != 3)
    goto not_supported;

  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    /* only unsigned 8-bit samples are supported */
    if (depth != 8 || sgnd)
      goto not_supported;

    /* component geometry change also forces renegotiation */
    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  /* still matching the previously negotiated format: nothing to do */
  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref, and
     * get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  /* avoid lists of fourcc, etc */
  /* NOTE(review): in GStreamer 0.10 gst_caps_normalize() takes a const
   * caps and returns a new one without consuming the argument — the caps
   * obtained above appear to leak here; confirm against the gst version
   * this is built for */
  allowed_caps = gst_caps_normalize (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  /* try each normalized candidate in turn; `caps` holds the current
   * candidate and is released at the top of the next iteration */
  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);

    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    /* candidate's colour family must match the image's colour space;
     * record which jas component supplies each plane in dec->cmpt[] */
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;

    /* match format with validity checks: each mapped jas component must
     * have exactly the geometry the candidate format expects */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      if (dec->cwidth[cmpt] != gst_video_format_get_component_width (format,
              j, width) ||
          dec->cheight[cmpt] != gst_video_format_get_component_height (format,
              j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] = gst_video_format_get_component_offset (dec->format, j,
          dec->width, dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] = gst_video_format_get_row_stride (dec->format, j,
          dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);
    /* scratch row buffer, one glong per pixel of a row */
    if (dec->buf)
      g_free (dec->buf);
    dec->buf = g_new0 (glong, dec->width);
    caps = gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);
    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);
    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;
    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
/* Compute a weighted SSIM score between two raw video buffers.
 *
 * Both buffers must carry caps describing the same raw video format and
 * size; per-component SSIM is computed by gst_compare_ssim_component() and
 * combined with weights that favour luma for YUV formats.
 *
 * Returns the combined SSIM in [0, 1], comp->threshold + 1 when the two
 * buffers' formats differ (forcing a mismatch), or 0 on invalid input. */
static gdouble
gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
  GstCaps *caps;
  GstVideoFormat format, f;
  gint width, height, w, h, i, comps;
  /* FIX: zero-initialize cssim — entries >= comps are never written by the
   * component loop, yet all four were read (uninitialized, UB) in the debug
   * loop and in the final weighted sum below */
  gdouble cssim[4] = { 0.0, 0.0, 0.0, 0.0 };
  gdouble ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 };
  guint8 *data1, *data2;

  caps = GST_BUFFER_CAPS (buf1);
  if (!caps)
    goto invalid_input;
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto invalid_input;

  caps = GST_BUFFER_CAPS (buf2);
  if (!caps)
    goto invalid_input;
  if (!gst_video_format_parse_caps (caps, &f, &w, &h))
    goto invalid_input;

  /* differing formats count as "different enough": exceed threshold */
  if (f != format || w != width || h != height)
    return comp->threshold + 1;

  comps = gst_video_format_is_gray (format) ? 1 : 3;
  if (gst_video_format_has_alpha (format))
    comps += 1;

  /* note that some are reported both yuv and gray */
  for (i = 0; i < comps; ++i)
    c[i] = 1.0;
  /* increase luma weight if yuv */
  if (gst_video_format_is_yuv (format) && (comps > 1))
    c[0] = comps - 1;
  /* normalize weights so they sum to 1 */
  for (i = 0; i < comps; ++i)
    c[i] /= (gst_video_format_is_yuv (format) && (comps > 1)) ?
        2 * (comps - 1) : comps;

  data1 = GST_BUFFER_DATA (buf1);
  data2 = GST_BUFFER_DATA (buf2);
  for (i = 0; i < comps; i++) {
    gint offset, cw, ch, step, stride;

    /* only support most common formats */
    if (gst_video_format_get_component_depth (format, i) != 8)
      goto unsupported_input;
    offset = gst_video_format_get_component_offset (format, i, width, height);
    cw = gst_video_format_get_component_width (format, i, width);
    ch = gst_video_format_get_component_height (format, i, height);
    step = gst_video_format_get_pixel_stride (format, i);
    stride = gst_video_format_get_row_stride (format, i, width);

    GST_LOG_OBJECT (comp, "component %d", i);
    cssim[i] = gst_compare_ssim_component (comp, data1 + offset,
        data2 + offset, cw, ch, step, stride);
    GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]);
  }

#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < 4; i++) {
    GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]);
  }
#endif

  ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] +
      cssim[3] * c[3];

  return ssim;

  /* ERRORS */
invalid_input:
  {
    GST_ERROR_OBJECT (comp, "ssim method needs raw video input");
    return 0;
  }
unsupported_input:
  {
    GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT,
        caps);
    return 0;
  }
}
/* Negotiate input caps on the sink pad: parse the raw video format, cache
 * the per-component layout and JPEG sampling factors, then set matching
 * image/jpeg caps on the src pad and resync the encoder.
 *
 * Returns TRUE if both sink parsing and src caps setting succeeded. */
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
  GstVideoFormat format;
  gint width, height;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint i;
  GstCaps *othercaps;
  gboolean ret;

  /* get info from caps */
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto refuse_caps;

  /* optional; pass along if present */
  fps_num = fps_den = -1;
  par_num = par_den = -1;
  gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

  /* nothing changed since last negotiation; keep current setup */
  if (width == enc->width && height == enc->height && enc->format == format
      && fps_num == enc->fps_num && fps_den == enc->fps_den
      && par_num == enc->par_num && par_den == enc->par_den)
    return TRUE;

  /* store input description */
  enc->format = format;
  enc->width = width;
  enc->height = height;
  enc->fps_num = fps_num;
  enc->fps_den = fps_den;
  enc->par_num = par_num;
  enc->par_den = par_den;

  /* prepare a cached image description; any alpha is disregarded in
   * encoding, so only luma (gray) or the three colour components matter.
   * FIX: removed the dead store "enc->channels = 3 + has_alpha" that was
   * unconditionally overwritten right here */
  if (gst_video_format_is_gray (format))
    enc->channels = 1;
  else
    enc->channels = 3;

  enc->h_max_samp = 0;
  enc->v_max_samp = 0;
  for (i = 0; i < enc->channels; ++i) {
    enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
    enc->cheight[i] = gst_video_format_get_component_height (format, i,
        height);
    enc->offset[i] = gst_video_format_get_component_offset (format, i,
        width, height);
    enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
    enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    /* sampling factor relative to the (4-aligned) full frame */
    enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
    enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
    enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
    enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (enc->h_max_samp <= 4);
  g_assert (enc->v_max_samp <= 4);
  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < enc->channels; ++i) {
    enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
    enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
  }
  /* NOTE(review): for gray input (channels == 1) inc[1] and inc[2] are not
   * refreshed by the loop above, so this reads stale values from a previous
   * negotiation — confirm planar is irrelevant in the gray path */
  enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);

  othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
  gst_caps_set_simple (othercaps,
      "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height,
      NULL);
  if (enc->fps_den > 0)
    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
  if (enc->par_den > 0)
    gst_caps_set_simple (othercaps,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
        NULL);

  ret = gst_pad_set_caps (enc->srcpad, othercaps);
  gst_caps_unref (othercaps);

  if (ret)
    gst_jpegenc_resync (enc);

  /* gst_pad_get_parent took a ref on enc */
  gst_object_unref (enc);

  return ret;

  /* ERRORS */
refuse_caps:
  {
    GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
    gst_object_unref (enc);
    return FALSE;
  }
}
/* Cache the frame geometry for @format/@width/@height on @self and select
 * the class-provided deinterlacing implementation matching the pixel
 * layout; deinterlace_frame is left NULL for unknown/unsupported formats. */
static void
gst_deinterlace_method_setup_impl (GstDeinterlaceMethod * self,
    GstVideoFormat format, gint width, gint height)
{
  GstDeinterlaceMethodClass *klass = GST_DEINTERLACE_METHOD_GET_CLASS (self);
  gint num_comps;
  gint c;

  /* remember the frame description */
  self->format = format;
  self->frame_width = width;
  self->frame_height = height;
  self->deinterlace_frame = NULL;

  if (format == GST_VIDEO_FORMAT_UNKNOWN)
    return;

  /* per-component geometry; an alpha plane adds a fourth component */
  num_comps = gst_video_format_has_alpha (format) ? 4 : 3;
  for (c = 0; c < num_comps; c++) {
    self->width[c] = gst_video_format_get_component_width (format, c, width);
    self->height[c] = gst_video_format_get_component_height (format, c,
        height);
    self->offset[c] = gst_video_format_get_component_offset (format, c,
        width, height);
    self->row_stride[c] = gst_video_format_get_row_stride (format, c, width);
    self->pixel_stride[c] = gst_video_format_get_pixel_stride (format, c);
  }

  /* dispatch to the subclass hook for this pixel layout */
  switch (format) {
    case GST_VIDEO_FORMAT_YUY2:
      self->deinterlace_frame = klass->deinterlace_frame_yuy2;
      break;
    case GST_VIDEO_FORMAT_YVYU:
      self->deinterlace_frame = klass->deinterlace_frame_yvyu;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      self->deinterlace_frame = klass->deinterlace_frame_uyvy;
      break;
    case GST_VIDEO_FORMAT_I420:
      self->deinterlace_frame = klass->deinterlace_frame_i420;
      break;
    case GST_VIDEO_FORMAT_YV12:
      self->deinterlace_frame = klass->deinterlace_frame_yv12;
      break;
    case GST_VIDEO_FORMAT_Y444:
      self->deinterlace_frame = klass->deinterlace_frame_y444;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      self->deinterlace_frame = klass->deinterlace_frame_y42b;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      self->deinterlace_frame = klass->deinterlace_frame_y41b;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      self->deinterlace_frame = klass->deinterlace_frame_ayuv;
      break;
    case GST_VIDEO_FORMAT_NV12:
      self->deinterlace_frame = klass->deinterlace_frame_nv12;
      break;
    case GST_VIDEO_FORMAT_NV21:
      self->deinterlace_frame = klass->deinterlace_frame_nv21;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
      self->deinterlace_frame = klass->deinterlace_frame_argb;
      break;
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
      self->deinterlace_frame = klass->deinterlace_frame_abgr;
      break;
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
      self->deinterlace_frame = klass->deinterlace_frame_rgba;
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
      self->deinterlace_frame = klass->deinterlace_frame_bgra;
      break;
    case GST_VIDEO_FORMAT_RGB:
      self->deinterlace_frame = klass->deinterlace_frame_rgb;
      break;
    case GST_VIDEO_FORMAT_BGR:
      self->deinterlace_frame = klass->deinterlace_frame_bgr;
      break;
    default:
      /* already NULL from the pre-assignment above */
      break;
  }
}