/* Recompute the balance lookup tables (when needed) and toggle the
 * base-transform passthrough flag to match the current property state. */
static void
gst_video_balance_update_properties (GstVideoBalance * videobalance)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (videobalance);
  gboolean is_passthrough;

  GST_OBJECT_LOCK (videobalance);
  is_passthrough = gst_video_balance_is_passthrough (videobalance);
  if (!is_passthrough)
    gst_video_balance_update_tables (videobalance);
  GST_OBJECT_UNLOCK (videobalance);

  /* Must be called without the object lock held. */
  gst_base_transform_set_passthrough (trans, is_passthrough);
}
/* Instance initializer: install the src-pad event handler and reset all
 * seek/segment state to defaults. */
static void
gst_navseek_init (GstNavSeek * navseek, GstNavSeekClass * g_class)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (navseek);

  /* Intercept upstream-travelling events (navigation) on the src pad. */
  gst_pad_set_event_function (trans->srcpad,
      GST_DEBUG_FUNCPTR (gst_navseek_handle_src_event));
  /* The element never modifies buffers. */
  gst_base_transform_set_passthrough (trans, TRUE);

  /* Default property / segment-grabbing state. */
  navseek->seek_offset = 5.0;
  navseek->loop = FALSE;
  navseek->grab_seg_start = FALSE;
  navseek->grab_seg_end = FALSE;
  navseek->segment_start = GST_CLOCK_TIME_NONE;
  navseek->segment_end = GST_CLOCK_TIME_NONE;
}
/* GObject::set_property implementation for GstAudioInvert. */
static void
gst_audio_invert_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAudioInvert *self = GST_AUDIO_INVERT (object);

  switch (prop_id) {
    case PROP_DEGREE:{
      gfloat degree = g_value_get_float (value);

      self->degree = degree;
      /* A degree of exactly 0.0 leaves samples untouched, so the element
       * can run in passthrough mode (exact float compare is intentional). */
      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self),
          degree == 0.0);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject::set_property implementation for GstRgLimiter. */
static void
gst_rg_limiter_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRgLimiter *self = GST_RG_LIMITER (object);

  switch (prop_id) {
    case PROP_ENABLED:{
      gboolean enabled = g_value_get_boolean (value);

      self->enabled = enabled;
      /* With limiting disabled, buffers pass through untouched. */
      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self), !enabled);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject::set_property implementation for GstAudioAmplify. */
static void
gst_audio_amplify_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAudioAmplify *self = GST_AUDIO_AMPLIFY (object);

  switch (prop_id) {
    case PROP_AMPLIFICATION:{
      gfloat amplification = g_value_get_float (value);

      self->amplification = amplification;
      /* Unity gain (exactly 1.0) means samples are unchanged, so the
       * element can operate in passthrough mode. */
      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self),
          amplification == 1.0);
      break;
    }
    case PROP_CLIPPING_METHOD:
      /* Re-select the processing function for the new clipping mode,
       * keeping the currently configured format and sample width. */
      gst_audio_amplify_set_process_function (self,
          g_value_get_enum (value), self->format, self->width);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Instance initializer: put the transform into a known clean state with
 * no blitter attached and no caps negotiated yet. */
void gst_imx_blitter_video_transform_init(GstImxBlitterVideoTransform *blitter_video_transform)
{
	GstBaseTransform *trans = GST_BASE_TRANSFORM(blitter_video_transform);

	/* Nothing configured yet. */
	blitter_video_transform->initialized = FALSE;
	blitter_video_transform->inout_info_equal = FALSE;
	blitter_video_transform->inout_info_set = FALSE;
	blitter_video_transform->blitter = NULL;
	blitter_video_transform->input_crop = GST_IMX_BASE_BLITTER_CROP_DEFAULT;

	gst_video_info_init(&(blitter_video_transform->input_video_info));
	gst_video_info_init(&(blitter_video_transform->output_video_info));

	g_mutex_init(&(blitter_video_transform->mutex));

	/* Set passthrough initially to FALSE; passthrough will later be
	 * enabled/disabled on a per-frame basis */
	gst_base_transform_set_passthrough(trans, FALSE);
	gst_base_transform_set_qos_enabled(trans, TRUE);
	gst_base_transform_set_in_place(trans, FALSE);
}
/* GObject::set_property implementation for GstVideoFlip. */
static void
gst_video_flip_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoFlip *videoflip = GST_VIDEO_FLIP (object);

  switch (prop_id) {
    case PROP_METHOD:{
      GstVideoFlipMethod new_method = g_value_get_enum (value);

      GST_OBJECT_LOCK (videoflip);
      if (new_method == videoflip->method) {
        /* No change, nothing to do. */
        GST_OBJECT_UNLOCK (videoflip);
        break;
      }

      GST_DEBUG_OBJECT (videoflip, "Changing method from %s to %s",
          video_flip_methods[videoflip->method].value_nick,
          video_flip_methods[new_method].value_nick);
      videoflip->method = new_method;
      GST_OBJECT_UNLOCK (videoflip);

      {
        GstBaseTransform *btrans = GST_BASE_TRANSFORM (videoflip);

        /* Identity is passthrough; any real flip/rotation may change the
         * output caps, so force renegotiation. */
        gst_base_transform_set_passthrough (btrans,
            new_method == GST_VIDEO_FLIP_METHOD_IDENTITY);
        gst_base_transform_reconfigure (btrans);
      }
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Rebuild the 8-bit gamma lookup table from the current gamma value and
 * switch to passthrough when gamma is exactly 1.0 (identity mapping). */
static void
gst_gamma_calculate_tables (GstGamma * gamma)
{
  gboolean passthrough = FALSE;

  GST_OBJECT_LOCK (gamma);
  if (gamma->gamma == 1.0) {
    passthrough = TRUE;
  } else {
    const gdouble exponent = 1.0 / gamma->gamma;
    gint i;

    /* out = round (255 * (in / 255) ^ (1 / gamma)) */
    for (i = 0; i < 256; i++) {
      gdouble mapped = pow (i / 255.0, exponent);
      gamma->gamma_table[i] = (guint8) floor (255.0 * mapped + 0.5);
    }
  }
  GST_OBJECT_UNLOCK (gamma);

  /* Must be called without the object lock held. */
  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (gamma), passthrough);
}
/* Instance initializer: start with the default method, which is
 * passthrough. */
static void
gst_video_flip_init (GstVideoFlip * videoflip, GstVideoFlipClass * klass)
{
  videoflip->method = PROP_METHOD_DEFAULT;
  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (videoflip), TRUE);
}
/* GstVideoFilter::set_info vfunc: (re)configures the scaler for the
 * negotiated input/output video info.  Computes letter-/pillar-box borders
 * when the display aspect ratios differ, enables passthrough for a true
 * 1:1 configuration, and otherwise creates a GstVideoConverter whose
 * options reflect the selected scaling method. */
static gboolean
gst_video_scale_set_info (GstVideoFilter * filter, GstCaps * in,
    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (filter);
  gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;

  /* Display aspect ratio = pixel dimensions x pixel aspect ratio;
   * -1/-1 marks "could not be computed". */
  if (!gst_util_fraction_multiply (in_info->width,
          in_info->height, in_info->par_n, in_info->par_d, &from_dar_n,
          &from_dar_d)) {
    from_dar_n = from_dar_d = -1;
  }
  if (!gst_util_fraction_multiply (out_info->width,
          out_info->height, out_info->par_n, out_info->par_d, &to_dar_n,
          &to_dar_d)) {
    to_dar_n = to_dar_d = -1;
  }

  videoscale->borders_w = videoscale->borders_h = 0;
  if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
    if (videoscale->add_borders) {
      gint n, d, to_h, to_w;

      if (from_dar_n != -1 && from_dar_d != -1
          && gst_util_fraction_multiply (from_dar_n, from_dar_d,
              out_info->par_d, out_info->par_n, &n, &d)) {
        /* Try fitting by width first (horizontal bars); if that doesn't
         * fit, fit by height instead (vertical bars). */
        to_h = gst_util_uint64_scale_int (out_info->width, d, n);
        if (to_h <= out_info->height) {
          videoscale->borders_h = out_info->height - to_h;
          videoscale->borders_w = 0;
        } else {
          to_w = gst_util_uint64_scale_int (out_info->height, n, d);
          g_assert (to_w <= out_info->width);
          videoscale->borders_h = 0;
          videoscale->borders_w = out_info->width - to_w;
        }
      } else {
        GST_WARNING_OBJECT (videoscale, "Can't calculate borders");
      }
    } else {
      GST_WARNING_OBJECT (videoscale, "Can't keep DAR!");
    }
  }

  if (in_info->width == out_info->width
      && in_info->height == out_info->height
      && videoscale->borders_w == 0 && videoscale->borders_h == 0) {
    /* Same size and no borders to draw: nothing to do per frame. */
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
  } else {
    GstStructure *options;

    GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter, "setup videoscaling");
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);

    /* Translate the "method" property into resampler options. */
    options = gst_structure_new_empty ("videoscale");
    switch (videoscale->method) {
      case GST_VIDEO_SCALE_NEAREST:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD,
            GST_VIDEO_RESAMPLER_METHOD_NEAREST, NULL);
        break;
      case GST_VIDEO_SCALE_BILINEAR:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_LINEAR,
            GST_VIDEO_RESAMPLER_OPT_MAX_TAPS, G_TYPE_INT, 2, NULL);
        break;
      case GST_VIDEO_SCALE_4TAP:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_SINC,
            GST_VIDEO_RESAMPLER_OPT_MAX_TAPS, G_TYPE_INT, 4, NULL);
        break;
      case GST_VIDEO_SCALE_LANCZOS:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD,
            GST_VIDEO_RESAMPLER_METHOD_LANCZOS, NULL);
        break;
      case GST_VIDEO_SCALE_BILINEAR2:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_LINEAR,
            NULL);
        break;
      case GST_VIDEO_SCALE_SINC:
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_SINC,
            NULL);
        break;
      case GST_VIDEO_SCALE_HERMITE:
        /* Cubic with B=0, C=0 is the Hermite spline. */
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 0.0,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.0,
            NULL);
        break;
      case GST_VIDEO_SCALE_SPLINE:
        /* Cubic with B=1, C=0 is the B-spline. */
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 1.0,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.0,
            NULL);
        break;
      case GST_VIDEO_SCALE_CATROM:
        /* Cubic with B=0, C=0.5 is Catmull-Rom. */
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE, (gdouble) 0.0,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_C, G_TYPE_DOUBLE, (gdouble) 0.5,
            NULL);
        break;
      case GST_VIDEO_SCALE_MITCHELL:
        /* Cubic with B=C=1/3 is the Mitchell-Netravali filter. */
        gst_structure_set (options,
            GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD,
            GST_TYPE_VIDEO_RESAMPLER_METHOD, GST_VIDEO_RESAMPLER_METHOD_CUBIC,
            GST_VIDEO_RESAMPLER_OPT_CUBIC_B, G_TYPE_DOUBLE,
            (gdouble) 1.0 / 3.0, GST_VIDEO_RESAMPLER_OPT_CUBIC_C,
            G_TYPE_DOUBLE, (gdouble) 1.0 / 3.0, NULL);
        break;
    }
    /* Common converter options: sharpness/envelope tuning, the centered
     * destination rectangle (leaving room for borders), and explicitly no
     * colorimetry/dither/chroma processing since only scaling is wanted. */
    gst_structure_set (options,
        GST_VIDEO_RESAMPLER_OPT_ENVELOPE, G_TYPE_DOUBLE,
        videoscale->envelope, GST_VIDEO_RESAMPLER_OPT_SHARPNESS,
        G_TYPE_DOUBLE, videoscale->sharpness,
        GST_VIDEO_RESAMPLER_OPT_SHARPEN, G_TYPE_DOUBLE, videoscale->sharpen,
        GST_VIDEO_CONVERTER_OPT_DEST_X, G_TYPE_INT,
        videoscale->borders_w / 2, GST_VIDEO_CONVERTER_OPT_DEST_Y,
        G_TYPE_INT, videoscale->borders_h / 2,
        GST_VIDEO_CONVERTER_OPT_DEST_WIDTH, G_TYPE_INT,
        out_info->width - videoscale->borders_w,
        GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT, G_TYPE_INT,
        out_info->height - videoscale->borders_h,
        GST_VIDEO_CONVERTER_OPT_MATRIX_MODE, GST_TYPE_VIDEO_MATRIX_MODE,
        GST_VIDEO_MATRIX_MODE_NONE, GST_VIDEO_CONVERTER_OPT_DITHER_METHOD,
        GST_TYPE_VIDEO_DITHER_METHOD, GST_VIDEO_DITHER_NONE,
        GST_VIDEO_CONVERTER_OPT_CHROMA_MODE, GST_TYPE_VIDEO_CHROMA_MODE,
        GST_VIDEO_CHROMA_MODE_NONE, NULL);

    if (videoscale->gamma_decode) {
      /* Scale in linear light: decode gamma before, re-encode after. */
      gst_structure_set (options,
          GST_VIDEO_CONVERTER_OPT_GAMMA_MODE, GST_TYPE_VIDEO_GAMMA_MODE,
          GST_VIDEO_GAMMA_MODE_REMAP, NULL);
    }

    /* Replace any previously created converter. */
    if (videoscale->convert)
      gst_video_converter_free (videoscale->convert);
    videoscale->convert = gst_video_converter_new (in_info, out_info, options);
  }

  GST_DEBUG_OBJECT (videoscale, "from=%dx%d (par=%d/%d dar=%d/%d), size %"
      G_GSIZE_FORMAT " -> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), "
      "size %" G_GSIZE_FORMAT,
      in_info->width, in_info->height, in_info->par_n, in_info->par_d,
      from_dar_n, from_dar_d, in_info->size, out_info->width,
      out_info->height, out_info->par_n, out_info->par_d, to_dar_n, to_dar_d,
      videoscale->borders_w, videoscale->borders_h, out_info->size);

  return TRUE;
}
/* Instance initializer for GstDtmfDetect. */
static void
gst_dtmf_detect_init (GstDtmfDetect * dtmfdetect, GstDtmfDetectClass * klass)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (dtmfdetect);

  /* Detection only inspects the audio, it never modifies buffers. */
  gst_base_transform_set_passthrough (trans, TRUE);
  /* Also receive GAP-flagged buffers instead of having them skipped. */
  gst_base_transform_set_gap_aware (trans, TRUE);
}
/* GstBaseTransform::set_caps vfunc: parses the negotiated in/out caps,
 * computes letter-/pillar-box borders when the display aspect ratios
 * differ, (re)allocates the temporary scan-line buffer, and decides
 * whether the element can run in passthrough mode.
 *
 * Returns TRUE when both caps could be parsed, FALSE otherwise. */
static gboolean
gst_video_scale_set_caps (GstBaseTransform * trans, GstCaps * in,
    GstCaps * out)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (trans);
  gboolean ret;
  gint from_dar_n, from_dar_d, to_dar_n, to_dar_d;
  gint from_par_n, from_par_d, to_par_n, to_par_d;

  ret = gst_video_format_parse_caps (in, &videoscale->format,
      &videoscale->from_width, &videoscale->from_height);
  ret &= gst_video_format_parse_caps (out, NULL, &videoscale->to_width,
      &videoscale->to_height);
  if (!ret)
    goto done;

  videoscale->src_size = gst_video_format_get_size (videoscale->format,
      videoscale->from_width, videoscale->from_height);
  videoscale->dest_size = gst_video_format_get_size (videoscale->format,
      videoscale->to_width, videoscale->to_height);

  /* Missing PAR fields default to square pixels. */
  if (!gst_video_parse_caps_pixel_aspect_ratio (in, &from_par_n, &from_par_d))
    from_par_n = from_par_d = 1;
  if (!gst_video_parse_caps_pixel_aspect_ratio (out, &to_par_n, &to_par_d))
    to_par_n = to_par_d = 1;

  /* Display aspect ratio = pixel dimensions x pixel aspect ratio;
   * -1/-1 marks "could not be computed". */
  if (!gst_util_fraction_multiply (videoscale->from_width,
          videoscale->from_height, from_par_n, from_par_d, &from_dar_n,
          &from_dar_d)) {
    from_dar_n = from_dar_d = -1;
  }
  if (!gst_util_fraction_multiply (videoscale->to_width,
          videoscale->to_height, to_par_n, to_par_d, &to_dar_n, &to_dar_d)) {
    to_dar_n = to_dar_d = -1;
  }

  videoscale->borders_w = videoscale->borders_h = 0;
  if (to_dar_n != from_dar_n || to_dar_d != from_dar_d) {
    if (videoscale->add_borders) {
      gint n, d, to_h, to_w;

      if (from_dar_n != -1 && from_dar_d != -1
          && gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
              to_par_d, &n, &d)) {
        /* Try fitting by width first (horizontal bars); if that doesn't
         * fit, fit by height instead (vertical bars). */
        to_h = gst_util_uint64_scale_int (videoscale->to_width, d, n);
        if (to_h <= videoscale->to_height) {
          videoscale->borders_h = videoscale->to_height - to_h;
          videoscale->borders_w = 0;
        } else {
          to_w = gst_util_uint64_scale_int (videoscale->to_height, n, d);
          g_assert (to_w <= videoscale->to_width);
          videoscale->borders_h = 0;
          videoscale->borders_w = videoscale->to_width - to_w;
        }
      } else {
        GST_WARNING_OBJECT (videoscale, "Can't calculate borders");
      }
    } else {
      GST_WARNING_OBJECT (videoscale, "Can't keep DAR!");
    }
  }

  /* g_free (NULL) is a no-op, no need to guard the free. */
  g_free (videoscale->tmp_buf);
  videoscale->tmp_buf = g_malloc (videoscale->to_width * 8 * 4);

  /* FIX: only enable passthrough when there are also no borders to draw.
   * Equal in/out dimensions with differing pixel aspect ratios still
   * produce a DAR mismatch and hence non-zero borders when add-borders is
   * set; passing buffers through would skip the border fill.  This also
   * matches the check done in gst_video_scale_set_info (). */
  gst_base_transform_set_passthrough (trans,
      (videoscale->from_width == videoscale->to_width
          && videoscale->from_height == videoscale->to_height
          && videoscale->borders_w == 0 && videoscale->borders_h == 0));

  GST_DEBUG_OBJECT (videoscale, "from=%dx%d (par=%d/%d dar=%d/%d), size %d "
      "-> to=%dx%d (par=%d/%d dar=%d/%d borders=%d:%d), size %d",
      videoscale->from_width, videoscale->from_height, from_par_n, from_par_d,
      from_dar_n, from_dar_d, videoscale->src_size, videoscale->to_width,
      videoscale->to_height, to_par_n, to_par_d, to_dar_n, to_dar_d,
      videoscale->borders_w, videoscale->borders_h, videoscale->dest_size);

done:
  return ret;
}
/* Recompute the chroma-keying coefficients from the current properties,
 * update the passthrough state, and force renegotiation when the
 * passthrough state changed.  Reads the properties under the object lock. */
static void
gst_gl_alpha_update_properties (GstGLAlpha * glalpha)
{
  GstBaseTransform *base = GST_BASE_TRANSFORM (glalpha);
  gboolean current_passthrough, passthrough;
  gfloat kgl;
  gfloat tmp;
  gfloat target_r, target_g, target_b;
  gfloat target_y, target_u, target_v;
  const float *matrix = cog_rgb_to_ycbcr_matrix_8bit_sdtv;

  GST_OBJECT_LOCK (glalpha);
  /* Pick the RGB colour to key out: pure green/blue for the preset
   * methods, otherwise the user-supplied 8-bit target colour. */
  switch (glalpha->method) {
    case ALPHA_METHOD_GREEN:
      target_r = 0.0;
      target_g = 1.0;
      target_b = 0.0;
      break;
    case ALPHA_METHOD_BLUE:
      target_r = 0.0;
      target_g = 0.0;
      target_b = 1.0;
      break;
    default:
      target_r = (gfloat) glalpha->target_r / 255.0;
      target_g = (gfloat) glalpha->target_g / 255.0;
      target_b = (gfloat) glalpha->target_b / 255.0;
      break;
  }

  /* Convert the target colour to Y'CbCr using the SDTV matrix. */
  target_y =
      matrix[0] * target_r + matrix[1] * target_g + matrix[2] * target_b +
      matrix[3];
  /* Cb,Cr without offset here because the chroma keying
   * works with them being in range [-128,127] */
  target_u =
      matrix[4] * target_r + matrix[5] * target_g + matrix[6] * target_b;
  target_v =
      matrix[8] * target_r + matrix[9] * target_g + matrix[10] * target_b;

  /* kgl is the chroma magnitude of the key colour; cb/cr become the unit
   * key-colour vector scaled by 0.5.
   * NOTE(review): kgl is 0 for achromatic targets (target_u == target_v
   * == 0), which makes the divisions below divide by zero — confirm
   * callers exclude that case. */
  tmp = target_u * target_u + target_v * target_v;
  kgl = sqrt (tmp);
  glalpha->cb = target_u / kgl * 0.5;
  glalpha->cr = target_v / kgl * 0.5;

  /* Acceptance-angle tangent and cotangent, scaled and clamped to 255.
   * NOTE(review): angle == 0 makes the cotangent term divide by zero —
   * presumably excluded by the property range; verify. */
  tmp = 15 * tan (M_PI * glalpha->angle / 180);
  tmp = MIN (tmp, 255);
  glalpha->accept_angle_tg = tmp;
  tmp = 15 / tan (M_PI * glalpha->angle / 180);
  tmp = MIN (tmp, 255);
  glalpha->accept_angle_ctg = tmp;

  glalpha->one_over_kc = wrap (2 / kgl - 255, 0, 256);
  tmp = 15 * target_y / kgl;
  tmp = MIN (tmp, 255);
  glalpha->kfgy_scale = tmp;
  glalpha->kg = MIN (kgl, 0.5);

  /* Squared noise threshold, normalized from the 8-bit property range. */
  glalpha->noise_level2 =
      glalpha->noise_level / 256.0 * glalpha->noise_level / 256.0;

  GST_INFO_OBJECT (glalpha, "target yuv: %f, %f, %f, "
      "kgl: %f, cb: %f, cr: %f, accept_angle_tg: %f, accept_angle_ctg: %f, "
      "one_over_kc: %f, kgfy_scale: %f, kg: %f, noise level: %f",
      (float) target_y, (float) target_u, (float) target_v, (float) kgl,
      (float) glalpha->cb, (float) glalpha->cr,
      (float) glalpha->accept_angle_tg, (float) glalpha->accept_angle_ctg,
      (float) glalpha->one_over_kc, (float) glalpha->kfgy_scale,
      (float) glalpha->kg, (float) glalpha->noise_level2);

  passthrough = gst_gl_alpha_is_passthrough (glalpha);
  GST_OBJECT_UNLOCK (glalpha);
  current_passthrough = gst_base_transform_is_passthrough (base);

  gst_base_transform_set_passthrough (base, passthrough);
  /* A flipped passthrough state changes what flows downstream. */
  if (current_passthrough != passthrough)
    gst_base_transform_reconfigure_src (base);
}
/* Instance initializer for GstBreakMyData. */
static void
gst_break_my_data_init (GstBreakMyData * bmd, GstBreakMyDataClass * g_class)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM (bmd);

  /* Corruption happens in place on the incoming buffer; initially the
   * element just passes data through. */
  gst_base_transform_set_in_place (trans, TRUE);
  gst_base_transform_set_passthrough (trans, TRUE);
}
/* GstVideoFilter::set_info vfunc: resolves the effective crop rectangle
 * for the negotiated caps and selects the pixel-packing strategy for the
 * input format.  A crop property of -1 means "derive automatically from
 * the in/out size difference".  Returns FALSE when the crop would remove
 * the whole picture or the format is unsupported. */
static gboolean
gst_video_crop_set_info (GstVideoFilter * vfilter, GstCaps * in,
    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
{
  GstVideoCrop *crop = GST_VIDEO_CROP (vfilter);
  int dx, dy;

  /* Snapshot the property values under lock; the crop_* fields are the
   * working copy from here on. */
  GST_OBJECT_LOCK (crop);
  crop->need_update = FALSE;
  crop->crop_left = crop->prop_left;
  crop->crop_right = crop->prop_right;
  crop->crop_top = crop->prop_top;
  crop->crop_bottom = crop->prop_bottom;
  GST_OBJECT_UNLOCK (crop);

  /* Total number of pixels to remove in each dimension. */
  dx = GST_VIDEO_INFO_WIDTH (in_info) - GST_VIDEO_INFO_WIDTH (out_info);
  dy = GST_VIDEO_INFO_HEIGHT (in_info) - GST_VIDEO_INFO_HEIGHT (out_info);

  /* Resolve automatic (-1) values: split evenly when both sides are
   * automatic (odd remainder goes right/bottom), otherwise give the
   * remainder to the automatic side. */
  if (crop->crop_left == -1 && crop->crop_right == -1) {
    crop->crop_left = dx / 2;
    crop->crop_right = dx / 2 + (dx & 1);
  } else if (crop->crop_left == -1) {
    if (G_UNLIKELY (crop->crop_right > dx))
      goto cropping_too_much;
    crop->crop_left = dx - crop->crop_right;
  } else if (crop->crop_right == -1) {
    if (G_UNLIKELY (crop->crop_left > dx))
      goto cropping_too_much;
    crop->crop_right = dx - crop->crop_left;
  }

  if (crop->crop_top == -1 && crop->crop_bottom == -1) {
    crop->crop_top = dy / 2;
    crop->crop_bottom = dy / 2 + (dy & 1);
  } else if (crop->crop_top == -1) {
    if (G_UNLIKELY (crop->crop_bottom > dy))
      goto cropping_too_much;
    crop->crop_top = dy - crop->crop_bottom;
  } else if (crop->crop_bottom == -1) {
    if (G_UNLIKELY (crop->crop_top > dy))
      goto cropping_too_much;
    crop->crop_bottom = dy - crop->crop_top;
  }

  /* At least one pixel must survive in each dimension. */
  if (G_UNLIKELY ((crop->crop_left + crop->crop_right) >=
          GST_VIDEO_INFO_WIDTH (in_info)
          || (crop->crop_top + crop->crop_bottom) >=
          GST_VIDEO_INFO_HEIGHT (in_info)))
    goto cropping_too_much;

  if (in && out)
    GST_LOG_OBJECT (crop, "incaps = %" GST_PTR_FORMAT ", outcaps = %"
        GST_PTR_FORMAT, in, out);

  /* An all-zero crop rectangle can be handled as passthrough. */
  if ((crop->crop_left | crop->crop_right | crop->crop_top |
          crop->crop_bottom) == 0) {
    GST_LOG_OBJECT (crop, "we are using passthrough");
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), TRUE);
  } else {
    GST_LOG_OBJECT (crop, "we are not using passthrough");
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
  }

  /* Classify the pixel layout so the transform can pick the right
   * copying strategy. */
  if (GST_VIDEO_INFO_IS_RGB (in_info) || GST_VIDEO_INFO_IS_GRAY (in_info)) {
    crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
  } else {
    switch (GST_VIDEO_INFO_FORMAT (in_info)) {
      case GST_VIDEO_FORMAT_AYUV:
        crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
        break;
      case GST_VIDEO_FORMAT_YVYU:
      case GST_VIDEO_FORMAT_YUY2:
      case GST_VIDEO_FORMAT_UYVY:
        crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX;
        if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_UYVY) {
          /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5] */
          crop->macro_y_off = 1;
        } else {
          /* YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
          crop->macro_y_off = 0;
        }
        break;
      case GST_VIDEO_FORMAT_I420:
      case GST_VIDEO_FORMAT_YV12:
        crop->packing = VIDEO_CROP_PIXEL_FORMAT_PLANAR;
        break;
      case GST_VIDEO_FORMAT_NV12:
      case GST_VIDEO_FORMAT_NV21:
        crop->packing = VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR;
        break;
      default:
        goto unknown_format;
    }
  }

  crop->in_info = *in_info;
  crop->out_info = *out_info;

  return TRUE;

  /* ERROR */
cropping_too_much:
  {
    GST_WARNING_OBJECT (crop, "we are cropping too much");
    return FALSE;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (crop, "Unsupported format");
    return FALSE;
  }
}
/* GstVideoFilter::set_info vfunc: selects the conversion function and,
 * where needed, the colour matrix for the negotiated ARGB-variant/AYUV
 * format pair.  BT.601 colorimetry is treated as SDTV, anything else as
 * HDTV.  Enables passthrough when format and colorimetry both match. */
static gboolean
gst_alpha_color_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
  gboolean in_sdtv, out_sdtv;

  alpha->process = NULL;
  alpha->matrix = NULL;

  /* This element only reorders channels / converts colorspaces; it never
   * changes the frame size. */
  if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info) ||
      GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info))
    goto invalid_caps;

  in_sdtv = in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
  out_sdtv = out_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;

  /* Pick the per-pixel transform for each (in, out) format pair.  A
   * matrix is only needed when crossing between RGB and YUV, or between
   * SDTV and HDTV colorimetry; pure channel shuffles leave it NULL. */
  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
    case GST_VIDEO_FORMAT_ARGB:
      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_ARGB:
          /* Identical format: handled via passthrough below. */
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = transform_argb_bgra;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = transform_argb_abgr;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = transform_argb_rgba;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_AYUV:
          alpha->process = transform_argb_ayuv;
          alpha->matrix =
              out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
              cog_rgb_to_ycbcr_matrix_8bit_hdtv;
          break;
        default:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
      }
      break;
    case GST_VIDEO_FORMAT_BGRA:
      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ARGB:
          alpha->process = transform_bgra_argb;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = transform_bgra_abgr;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = transform_bgra_rgba;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_AYUV:
          alpha->process = transform_bgra_ayuv;
          alpha->matrix =
              out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
              cog_rgb_to_ycbcr_matrix_8bit_hdtv;
          break;
        default:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
      }
      break;
    case GST_VIDEO_FORMAT_ABGR:
      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = transform_abgr_rgba;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ARGB:
          alpha->process = transform_abgr_argb;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = transform_abgr_bgra;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_AYUV:
          alpha->process = transform_abgr_ayuv;
          alpha->matrix =
              out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
              cog_rgb_to_ycbcr_matrix_8bit_hdtv;
          break;
        default:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
      }
      break;
    case GST_VIDEO_FORMAT_RGBA:
      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ARGB:
          alpha->process = transform_rgba_argb;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = transform_rgba_abgr;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = transform_rgba_bgra;
          alpha->matrix = NULL;
          break;
        case GST_VIDEO_FORMAT_AYUV:
          alpha->process = transform_rgba_ayuv;
          alpha->matrix =
              out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
              cog_rgb_to_ycbcr_matrix_8bit_hdtv;
          break;
        default:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
      }
      break;
    case GST_VIDEO_FORMAT_AYUV:
      switch (GST_VIDEO_INFO_FORMAT (out_info)) {
        case GST_VIDEO_FORMAT_AYUV:
          if (in_sdtv == out_sdtv) {
            alpha->process = transform_ayuv_ayuv;
            alpha->matrix = NULL;
          } else {
            /* Same layout, but SDTV<->HDTV colorimetry conversion. */
            alpha->process = transform_ayuv_ayuv;
            alpha->matrix =
                out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
                cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit;
          }
          break;
        case GST_VIDEO_FORMAT_ARGB:
          alpha->process = transform_ayuv_argb;
          alpha->matrix =
              in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
              cog_ycbcr_to_rgb_matrix_8bit_hdtv;
          break;
        case GST_VIDEO_FORMAT_BGRA:
          alpha->process = transform_ayuv_bgra;
          alpha->matrix =
              in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
              cog_ycbcr_to_rgb_matrix_8bit_hdtv;
          break;
        case GST_VIDEO_FORMAT_ABGR:
          alpha->process = transform_ayuv_abgr;
          alpha->matrix =
              in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
              cog_ycbcr_to_rgb_matrix_8bit_hdtv;
          break;
        case GST_VIDEO_FORMAT_RGBA:
          alpha->process = transform_ayuv_rgba;
          alpha->matrix =
              in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
              cog_ycbcr_to_rgb_matrix_8bit_hdtv;
          break;
        default:
          alpha->process = NULL;
          alpha->matrix = NULL;
          break;
      }
      break;
    default:
      alpha->process = NULL;
      alpha->matrix = NULL;
      break;
  }

  /* Identical format and colorimetry: nothing to do, use passthrough.
   * Otherwise a process function must have been found above. */
  if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info) &&
      in_sdtv == out_sdtv)
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
  else if (!alpha->process)
    goto no_process;

  return TRUE;

  /* ERRORS */
invalid_caps:
  {
    GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps");
    return FALSE;
  }
no_process:
  {
    GST_DEBUG_OBJECT (alpha, "could not find process function");
    return FALSE;
  }
}