/**
 * gst_video_info_is_equal:
 * @info: a #GstVideoInfo
 * @other: a #GstVideoInfo
 *
 * Compares two #GstVideoInfo and returns whether they are equal or not.
 *
 * Returns: %TRUE if @info and @other are equal, else %FALSE.
 */
gboolean
gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other)
{
  gint i;

  if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other))
    return FALSE;
  if (GST_VIDEO_INFO_INTERLACE_MODE (info) !=
      GST_VIDEO_INFO_INTERLACE_MODE (other))
    return FALSE;
  if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other))
    return FALSE;
  if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other))
    return FALSE;
  if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other))
    return FALSE;
  if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other))
    return FALSE;
  if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other))
    return FALSE;
  if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other))
    return FALSE;
  if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other))
    return FALSE;
  if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other))
    return FALSE;
  if (!gst_video_colorimetry_is_equal (&GST_VIDEO_INFO_COLORIMETRY (info),
          &GST_VIDEO_INFO_COLORIMETRY (other)))
    return FALSE;
  if (GST_VIDEO_INFO_CHROMA_SITE (info) != GST_VIDEO_INFO_CHROMA_SITE (other))
    return FALSE;
  if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) !=
      GST_VIDEO_INFO_MULTIVIEW_MODE (other))
    return FALSE;
  if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) !=
      GST_VIDEO_INFO_MULTIVIEW_FLAGS (other))
    return FALSE;
  if (GST_VIDEO_INFO_VIEWS (info) != GST_VIDEO_INFO_VIEWS (other))
    return FALSE;

  for (i = 0; i < info->finfo->n_planes; i++) {
    if (info->stride[i] != other->stride[i])
      return FALSE;
    if (info->offset[i] != other->offset[i])
      return FALSE;
  }

  return TRUE;
}

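/* Illustrative usage sketch (not part of the original source): building two
 * GstVideoInfo from caps and comparing them with gst_video_info_is_equal().
 * The caps strings and the helper name are made up for the example. */
static gboolean
example_compare_infos (void)
{
  GstVideoInfo a, b;
  GstCaps *caps_a, *caps_b;
  gboolean equal = FALSE;

  caps_a = gst_caps_from_string ("video/x-raw, format=I420, width=640, "
      "height=480, framerate=30/1");
  caps_b = gst_caps_from_string ("video/x-raw, format=I420, width=640, "
      "height=480, framerate=25/1");

  if (gst_video_info_from_caps (&a, caps_a) &&
      gst_video_info_from_caps (&b, caps_b))
    equal = gst_video_info_is_equal (&a, &b);   /* FALSE: framerates differ */

  gst_caps_unref (caps_a);
  gst_caps_unref (caps_b);
  return equal;
}
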
gboolean
gst_caps_set_interlaced (GstCaps * caps, GstVideoInfo * vip)
{
  GstVideoInterlaceMode mode;
  const gchar *mode_str;

  mode = vip ? GST_VIDEO_INFO_INTERLACE_MODE (vip) :
      GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  switch (mode) {
    case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
      mode_str = "progressive";
      break;
    case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
      mode_str = "interleaved";
      break;
    case GST_VIDEO_INTERLACE_MODE_MIXED:
      mode_str = "mixed";
      break;
    default:
      GST_ERROR ("unsupported `interlace-mode' %d", mode);
      return FALSE;
  }

  gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING, mode_str, NULL);
  return TRUE;
}

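/* Illustrative sketch (not part of the original source): tagging freshly
 * created caps with the interlace-mode of an existing GstVideoInfo. The
 * helper name and caps fields are made up for the example. */
static GstCaps *
example_make_interlaced_caps (GstVideoInfo * info)
{
  GstCaps *caps = gst_caps_new_simple ("video/x-raw",
      "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (info),
      "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (info), NULL);

  /* Adds an "interlace-mode" string field, e.g. "progressive" */
  if (!gst_caps_set_interlaced (caps, info)) {
    gst_caps_unref (caps);
    return NULL;
  }
  return caps;
}
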
void
gst_video_info_change_format (GstVideoInfo * vip, GstVideoFormat format,
    guint width, guint height)
{
  GstVideoInfo vi = *vip;

  gst_video_info_set_format (vip, format, width, height);

  GST_VIDEO_INFO_INTERLACE_MODE (vip) = GST_VIDEO_INFO_INTERLACE_MODE (&vi);
  GST_VIDEO_INFO_FLAGS (vip) = GST_VIDEO_INFO_FLAGS (&vi);
  GST_VIDEO_INFO_VIEWS (vip) = GST_VIDEO_INFO_VIEWS (&vi);
  GST_VIDEO_INFO_PAR_N (vip) = GST_VIDEO_INFO_PAR_N (&vi);
  GST_VIDEO_INFO_PAR_D (vip) = GST_VIDEO_INFO_PAR_D (&vi);
  GST_VIDEO_INFO_FPS_N (vip) = GST_VIDEO_INFO_FPS_N (&vi);
  GST_VIDEO_INFO_FPS_D (vip) = GST_VIDEO_INFO_FPS_D (&vi);
  GST_VIDEO_INFO_MULTIVIEW_MODE (vip) = GST_VIDEO_INFO_MULTIVIEW_MODE (&vi);
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (vip) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (&vi);
}

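/* Illustrative sketch (not part of the original source): switching a video
 * info from I420 1920x1080 to NV12 1280x720 while keeping the previously
 * negotiated framerate. The concrete values are made up for the example. */
static void
example_change_format (void)
{
  GstVideoInfo info;

  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 1920, 1080);
  GST_VIDEO_INFO_FPS_N (&info) = 30;
  GST_VIDEO_INFO_FPS_D (&info) = 1;

  /* Strides, offsets and size are recomputed for NV12 1280x720, while FPS,
   * PAR, flags, views and interlace mode are carried over from the old info. */
  gst_video_info_change_format (&info, GST_VIDEO_FORMAT_NV12, 1280, 720);
}
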
static void
gst_raw_video_parse_set_property (GObject * object, guint prop_id,
    GValue const *value, GParamSpec * pspec)
{
  GstBaseParse *base_parse = GST_BASE_PARSE (object);
  GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (object);
  GstRawVideoParseConfig *props_cfg = &(raw_video_parse->properties_config);

  /* All properties are handled similarly:
   * - if the new value is the same as the current value, nothing is done
   * - the parser lock is held while the new value is set
   * - if the properties config is the current config, the source caps are
   *   invalidated to ensure that the code in handle_frame pushes a new CAPS
   *   event out
   * - properties that affect the video frame size call the function to update
   *   the info and also call gst_base_parse_set_min_frame_size() to ensure
   *   that the minimum frame size can hold 1 frame (= one sample for each
   *   channel); to ensure that the min frame size includes any extra padding,
   *   it is set to the result of gst_raw_video_parse_get_config_frame_size()
   * - property configuration values that require video info updates aren't
   *   written directly into the video info structure, but into the extra
   *   fields instead (gst_raw_video_parse_update_info() then copies the
   *   values from these fields into the video info); see the documentation
   *   inside gst_raw_video_parse_update_info() for the reason why
   */

  switch (prop_id) {
    case PROP_WIDTH:
    {
      gint new_width = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_width != props_cfg->width) {
        props_cfg->width = new_width;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_HEIGHT:
    {
      gint new_height = g_value_get_int (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_height != props_cfg->height) {
        props_cfg->height = new_height;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FORMAT:
    {
      GstVideoFormat new_format = g_value_get_enum (value);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      if (new_format != props_cfg->format) {
        props_cfg->format = new_format;
        gst_raw_video_parse_update_info (props_cfg);

        if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
          gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
          gst_base_parse_set_min_frame_size (base_parse,
              gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                  GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
        }
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PIXEL_ASPECT_RATIO:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The pixel aspect ratio does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->pixel_aspect_ratio_n =
          GST_VIDEO_INFO_PAR_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->pixel_aspect_ratio_d =
          GST_VIDEO_INFO_PAR_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);
      GST_DEBUG_OBJECT (raw_video_parse, "setting pixel aspect ratio to %u/%u",
          props_cfg->pixel_aspect_ratio_n, props_cfg->pixel_aspect_ratio_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAMERATE:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* The framerate does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->framerate_n = GST_VIDEO_INFO_FPS_N (&(props_cfg->info)) =
          gst_value_get_fraction_numerator (value);
      props_cfg->framerate_d = GST_VIDEO_INFO_FPS_D (&(props_cfg->info)) =
          gst_value_get_fraction_denominator (value);

      GST_DEBUG_OBJECT (raw_video_parse, "setting framerate to %u/%u",
          props_cfg->framerate_n, props_cfg->framerate_d);

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_INTERLACED:
    {
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      /* Interlacing does not affect the video frame size,
       * so it is just set directly without any updates */
      props_cfg->interlaced = g_value_get_boolean (value);
      GST_VIDEO_INFO_INTERLACE_MODE (&(props_cfg->info)) =
          props_cfg->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
          GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_TOP_FIELD_FIRST:
    {
      /* The top-field-first flag is a detail related to
       * interlacing, so no video info update is needed */
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
      props_cfg->top_field_first = g_value_get_boolean (value);
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_STRIDES:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane strides property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane strides property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the stride array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_strides[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse, "plane #%u stride: %d", i,
            props_cfg->plane_strides[i]);
      }

      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_PLANE_OFFSETS:
    {
      GValueArray *valarray = g_value_get_boxed (value);
      guint n_planes;
      guint i;

      /* If no valarray is given, then disable custom
       * plane strides & offsets and stick to the
       * standard computed ones */
      if (valarray == NULL) {
        GST_DEBUG_OBJECT (raw_video_parse,
            "custom plane strides & offsets disabled");
        props_cfg->custom_plane_strides = FALSE;
        gst_raw_video_parse_update_info (props_cfg);
        break;
      }

      /* Sanity check - reject empty arrays */
      if ((valarray != NULL) && (valarray->n_values == 0)) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("plane offsets property holds an empty array"), (NULL));
        break;
      }

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);

      n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));

      /* Check that the valarray holds the right number of values */
      if (valarray->n_values != n_planes) {
        GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
            ("incorrect number of elements in plane offsets property"),
            ("expected: %u, got: %u", n_planes, valarray->n_values));
        GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
        break;
      }

      /* Copy the values to the offset array */
      for (i = 0; i < n_planes; ++i) {
        GValue *val = g_value_array_get_nth (valarray, i);
        props_cfg->plane_offsets[i] = g_value_get_uint (val);
        GST_DEBUG_OBJECT (raw_video_parse,
            "plane #%u offset: %" G_GSIZE_FORMAT, i,
            props_cfg->plane_offsets[i]);
      }

      props_cfg->custom_plane_strides = TRUE;

      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    case PROP_FRAME_STRIDE:
    {
      /* The frame stride does not affect the video frame size,
       * so it is just set directly without any updates */
      GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
      props_cfg->frame_stride = g_value_get_uint (value);
      gst_raw_video_parse_update_info (props_cfg);

      if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
        gst_base_parse_set_min_frame_size (base_parse,
            gst_raw_video_parse_get_config_frame_size (raw_base_parse,
                GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));

      GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
      break;
    }

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

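/* Illustrative sketch (not part of the original source): configuring a
 * rawvideoparse element from application code. The property names match the
 * cases handled in gst_raw_video_parse_set_property() above; the concrete
 * values and the helper name are made up for the example. */
static GstElement *
example_configure_rawvideoparse (void)
{
  GstElement *parse = gst_element_factory_make ("rawvideoparse", NULL);

  if (parse == NULL)
    return NULL;

  /* Triggers PROP_WIDTH / PROP_HEIGHT / PROP_FORMAT above; each setter
   * updates the video info and the minimum frame size under the config
   * mutex. */
  g_object_set (parse, "width", 320, "height", 240,
      "format", GST_VIDEO_FORMAT_I420, NULL);

  /* Fraction-typed properties are easiest to set from a serialized string. */
  gst_util_set_object_arg (G_OBJECT (parse), "framerate", "25/1");

  return parse;
}
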
static void
gst_raw_video_parse_update_info (GstRawVideoParseConfig * config)
{
  guint i;
  guint n_planes;
  guint last_plane;
  gsize last_plane_offset, last_plane_size;
  GstVideoInfo *info = &(config->info);

  GST_DEBUG ("updating info with width %u height %u format %s "
      "custom plane strides&offsets %d", config->width, config->height,
      gst_video_format_to_string (config->format),
      config->custom_plane_strides);

  gst_video_info_set_format (info, config->format, config->width,
      config->height);

  GST_VIDEO_INFO_PAR_N (info) = config->pixel_aspect_ratio_n;
  GST_VIDEO_INFO_PAR_D (info) = config->pixel_aspect_ratio_d;
  GST_VIDEO_INFO_FPS_N (info) = config->framerate_n;
  GST_VIDEO_INFO_FPS_D (info) = config->framerate_d;
  GST_VIDEO_INFO_INTERLACE_MODE (info) =
      config->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
      GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* Check if there are custom plane strides & offsets that need to be
   * preserved */
  if (config->custom_plane_strides) {
    /* In case there are, overwrite the offsets & strides computed by
     * gst_video_info_set_format with the custom ones */
    for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
      GST_VIDEO_INFO_PLANE_OFFSET (info, i) = config->plane_offsets[i];
      GST_VIDEO_INFO_PLANE_STRIDE (info, i) = config->plane_strides[i];
    }
  } else {
    /* No custom planes & offsets; copy the computed ones into
     * the plane_offsets & plane_strides arrays to ensure they
     * are equal to the ones in the videoinfo */
    for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
      config->plane_offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (info, i);
      config->plane_strides[i] = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
    }
  }

  n_planes = GST_VIDEO_INFO_N_PLANES (info);
  if (n_planes < 1)
    n_planes = 1;

  /* Figure out what plane is the physically last one. Typically
   * this is the last plane in the list (= at index n_planes-1).
   * However, this is not guaranteed, so we have to scan the offsets
   * to find the last plane. */
  last_plane_offset = 0;
  last_plane = 0;
  for (i = 0; i < n_planes; ++i) {
    gsize plane_offset = GST_VIDEO_INFO_PLANE_OFFSET (info, i);
    if (plane_offset >= last_plane_offset) {
      last_plane = i;
      last_plane_offset = plane_offset;
    }
  }

  last_plane_size =
      GST_VIDEO_INFO_PLANE_STRIDE (info, last_plane) *
      GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo, last_plane,
      config->height);

  GST_VIDEO_INFO_SIZE (info) = last_plane_offset + last_plane_size;

  GST_DEBUG ("last plane #%u: offset: %" G_GSIZE_FORMAT " size: %"
      G_GSIZE_FORMAT " => frame size minus extra padding: %" G_GSIZE_FORMAT,
      last_plane, last_plane_offset, last_plane_size,
      GST_VIDEO_INFO_SIZE (info));
}

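/* Worked example (illustrative, assuming the default computed layout for
 * I420 at 640x480 with no custom strides or offsets):
 *
 *   plane #0 (Y): offset 0,      stride 640
 *   plane #1 (U): offset 307200, stride 320
 *   plane #2 (V): offset 384000, stride 320
 *
 * The physically last plane is #2. Its scaled height is 480 / 2 = 240, so
 * last_plane_size = 320 * 240 = 76800 and
 * GST_VIDEO_INFO_SIZE = 384000 + 76800 = 460800 bytes (= 640 * 480 * 1.5). */
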
static gboolean gst_imx_ipu_blitter_set_input_frame(GstImxBlitter *blitter, GstBuffer *input_frame)
{
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(blitter);

	gst_buffer_replace(&(ipu_blitter->input_frame), input_frame);

	if (ipu_blitter->input_frame != NULL)
	{
		ipu_blitter->priv->main_task.input.deinterlace.enable = 0;

		if (ipu_blitter->deinterlacing_enabled)
		{
			switch (GST_VIDEO_INFO_INTERLACE_MODE(&(ipu_blitter->input_video_info)))
			{
				case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
					GST_LOG_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
					ipu_blitter->priv->main_task.input.deinterlace.enable = 1;
					break;

				case GST_VIDEO_INTERLACE_MODE_MIXED:
				{
					if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_INTERLACED))
					{
						GST_LOG_OBJECT(ipu_blitter, "frame has deinterlacing flag");
						ipu_blitter->priv->main_task.input.deinterlace.enable = 1;
					}
					else
						GST_LOG_OBJECT(ipu_blitter, "frame has no deinterlacing flag");

					break;
				}

				case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
					GST_LOG_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
					break;

				case GST_VIDEO_INTERLACE_MODE_FIELDS:
					GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
					break;

				default:
					GST_LOG_OBJECT(ipu_blitter, "input stream uses unknown interlacing mode -> no deinterlacing performed");
			}
		}

		if (ipu_blitter->priv->main_task.input.deinterlace.enable)
		{
			if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_TFF))
			{
				GST_LOG_OBJECT(ipu_blitter, "interlaced with top field first");
				ipu_blitter->priv->main_task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_TOP;
			}
			else
			{
				GST_LOG_OBJECT(ipu_blitter, "interlaced with bottom field first");
				ipu_blitter->priv->main_task.input.deinterlace.field_fmt = IPU_DEINTERLACE_FIELD_BOTTOM;
			}

			ipu_blitter->priv->main_task.input.deinterlace.motion = HIGH_MOTION;
		}
		else
			ipu_blitter->priv->main_task.input.deinterlace.motion = MED_MOTION;

		gst_imx_ipu_blitter_set_task_params(ipu_blitter, input_frame, &(ipu_blitter->priv->main_task), &(ipu_blitter->input_video_info), TRUE);

		if (ipu_blitter->use_entire_input_frame)
		{
			ipu_blitter->priv->main_task.input.crop.pos.x = 0;
			ipu_blitter->priv->main_task.input.crop.pos.y = 0;
			ipu_blitter->priv->main_task.input.crop.w = GST_VIDEO_INFO_WIDTH(&(ipu_blitter->input_video_info));
			ipu_blitter->priv->main_task.input.crop.h = GST_VIDEO_INFO_HEIGHT(&(ipu_blitter->input_video_info));
		}
	}

	return TRUE;
}

static GstCaps *
gst_mfxpostproc_transform_caps_impl (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstMfxPostproc *const vpp = GST_MFXPOSTPROC (trans);
  GstVideoInfo vi, peer_vi;
  GstVideoFormat out_format;
  GstCaps *out_caps, *peer_caps;
  GstMfxCapsFeature feature;
  const gchar *feature_str;
  guint width, height;

  /* Generate the sink pad caps, which may be fixated afterwards */
  if (direction == GST_PAD_SRC) {
    if (!ensure_allowed_sinkpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_sinkpad_caps);
  }

  /* Generate complete set of src pad caps if non-fixated sink pad
   * caps are provided */
  if (!gst_caps_is_fixed (caps)) {
    if (!ensure_allowed_srcpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_srcpad_caps);
  }

  /* Generate the expected src pad caps, from the current fixated
   * sink pad caps */
  if (!gst_video_info_from_caps (&vi, caps))
    return NULL;

  if (vpp->deinterlace_mode)
    GST_VIDEO_INFO_INTERLACE_MODE (&vi) = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* Update size from user-specified parameters */
  find_best_size (vpp, &vi, &width, &height);

  /* Update format from user-specified parameters */
  peer_caps = gst_pad_peer_query_caps (GST_BASE_TRANSFORM_SRC_PAD (trans),
      vpp->allowed_srcpad_caps);

  if (gst_caps_is_any (peer_caps) || gst_caps_is_empty (peer_caps))
    return peer_caps;
  if (!gst_caps_is_fixed (peer_caps))
    peer_caps = gst_caps_fixate (peer_caps);

  gst_video_info_from_caps (&peer_vi, peer_caps);
  out_format = GST_VIDEO_INFO_FORMAT (&peer_vi);

  /* Update width and height from the caps */
  if (GST_VIDEO_INFO_HEIGHT (&peer_vi) != 1 &&
      GST_VIDEO_INFO_WIDTH (&peer_vi) != 1)
    find_best_size (vpp, &peer_vi, &width, &height);

  if (vpp->format != DEFAULT_FORMAT)
    out_format = vpp->format;

  if (vpp->fps_n) {
    GST_VIDEO_INFO_FPS_N (&vi) = vpp->fps_n;
    GST_VIDEO_INFO_FPS_D (&vi) = vpp->fps_d;
    vpp->field_duration = gst_util_uint64_scale (GST_SECOND,
        vpp->fps_d, vpp->fps_n);
    if (DEFAULT_FRC_ALG == vpp->alg)
      vpp->alg = GST_MFX_FRC_PRESERVE_TIMESTAMP;
  }

  if (peer_caps)
    gst_caps_unref (peer_caps);

  feature =
      gst_mfx_find_preferred_caps_feature (GST_BASE_TRANSFORM_SRC_PAD (trans),
      &out_format);
  gst_video_info_change_format (&vi, out_format, width, height);

  out_caps = gst_video_info_to_caps (&vi);
  if (!out_caps)
    return NULL;

  if (feature) {
    feature_str = gst_mfx_caps_feature_to_string (feature);
    if (feature_str)
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_new (feature_str, NULL));
  }

  if (vpp->format != out_format)
    vpp->format = out_format;

  return out_caps;
}