/* Fixate @othercaps (the caps of the opposite pad) against @caps.
 *
 * When fixating towards the sink pad (@direction == GST_PAD_SRC) the
 * proposed sinkpad caps are simply fixated; in the other direction the
 * src caps are derived from the fixated sink caps.  In both cases we
 * record, per pad, whether dmabuf memory can be negotiated.
 *
 * Takes ownership of @othercaps; returns new fixated caps (transfer full).
 */
static GstCaps *
gst_msdkvpp_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstMsdkVPP *thiz = GST_MSDKVPP (trans);
  GstCaps *result = NULL;
  gboolean *use_dmabuf;

  if (direction == GST_PAD_SRC) {
    /* Fix: the original passed the still-NULL @result to
     * gst_caps_fixate().  Fixate the proposed sinkpad caps instead;
     * take an extra ref because gst_caps_fixate() consumes its
     * argument and @othercaps is unconditionally unreffed below. */
    result = gst_caps_fixate (gst_caps_ref (othercaps));
    use_dmabuf = &thiz->use_sinkpad_dmabuf;
  } else {
    result = gst_msdkvpp_fixate_srccaps (thiz, caps, othercaps);
    use_dmabuf = &thiz->use_srcpad_dmabuf;
  }

  GST_DEBUG_OBJECT (trans, "fixated to %" GST_PTR_FORMAT, result);
  gst_caps_unref (othercaps);

  /* If the peer on the opposite side can handle dmabuf, advertise the
   * dmabuf caps feature and remember the choice for allocation time. */
  if (pad_can_dmabuf (thiz,
          direction == GST_PAD_SRC ? GST_PAD_SINK : GST_PAD_SRC, result)) {
    gst_caps_set_features (result, 0,
        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_DMABUF, NULL));
    *use_dmabuf = TRUE;
  }

  return result;
}
/* Build a new caps set where every structure carries @feature_name,
 * additionally keeping any of its original features that are listed in
 * @passthrough.  Structures whose features are ANY are emitted twice:
 * once with just @feature_name and once augmented with all passthrough
 * features.  Returns a new caps (transfer full). */
static GstCaps *
_set_caps_features_with_passthrough (const GstCaps * caps,
    const gchar * feature_name, GstCapsFeatures * passthrough)
{
  GstCaps *result = gst_caps_new_empty ();
  guint n_structs = gst_caps_get_size (caps);
  guint idx;

  for (idx = 0; idx < n_structs; idx++) {
    GstStructure *s = gst_caps_get_structure (caps, idx);
    GstCapsFeatures *in_features = gst_caps_get_features (caps, idx);
    GstCapsFeatures *out_features = gst_caps_features_new (feature_name, NULL);
    guint k, count;

    if (gst_caps_features_is_any (in_features)) {
      /* ANY: append the plain feature first, then augment the copy we
       * keep with every passthrough feature for the second append */
      gst_caps_append_structure_full (result, gst_structure_copy (s),
          gst_caps_features_copy (out_features));

      count = gst_caps_features_get_size (passthrough);
      for (k = 0; k < count; k++) {
        const gchar *f = gst_caps_features_get_nth (passthrough, k);

        if (!gst_caps_features_contains (out_features, f))
          gst_caps_features_add (out_features, f);
      }
    } else {
      /* keep only the original features allowed to pass through;
       * system memory is implicit and always dropped */
      count = gst_caps_features_get_size (in_features);
      for (k = 0; k < count; k++) {
        const gchar *f = gst_caps_features_get_nth (in_features, k);

        if (gst_caps_features_contains (out_features, f))
          continue;
        if (g_strcmp0 (f, GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY) == 0)
          continue;
        if (gst_caps_features_contains (passthrough, f))
          gst_caps_features_add (out_features, f);
      }
    }

    gst_caps_append_structure_full (result, gst_structure_copy (s),
        out_features);
  }

  return result;
}
/* Return a copy of @caps in which every structure's features are
 * replaced by a single feature named @feature_name (transfer full). */
static GstCaps *
_set_caps_features (const GstCaps * caps, const gchar * feature_name)
{
  GstCaps *result;
  guint idx, count;

  result = gst_caps_copy (caps);
  count = gst_caps_get_size (result);

  for (idx = 0; idx < count; idx++) {
    /* gst_caps_set_features() takes ownership of the new features */
    gst_caps_set_features (result, idx,
        gst_caps_features_new (feature_name, NULL));
  }

  return result;
}
/* Create template caps for @format tagged with the caps feature named
 * @features_string.  Returns NULL if either the caps or the features
 * cannot be created; otherwise a new caps ref (transfer full). */
GstCaps *
gst_vaapi_video_format_new_template_caps_with_features (GstVideoFormat format,
    const gchar * features_string)
{
  GstCaps *caps;
  GstCapsFeatures *features;

  caps = gst_vaapi_video_format_new_template_caps (format);
  if (!caps)
    return NULL;

  features = gst_caps_features_new (features_string, NULL);
  if (!features) {
    gst_caps_unref (caps);
    return NULL;
  }

  /* gst_caps_set_features() takes ownership of @features */
  gst_caps_set_features (caps, 0, features);
  return caps;
}
/* Return a copy of @caps where each structure's features become
 * @feature_name plus whichever of its original features appear in
 * @passthrough (may be NULL).  System memory is never carried over.
 * Returns a new caps (transfer full). */
static GstCaps *
_set_caps_features_with_passthrough (const GstCaps * caps,
    const gchar * feature_name, GstCapsFeatures * passthrough)
{
  GstCaps *result = gst_caps_copy (caps);
  guint n_structs = gst_caps_get_size (caps);
  guint idx;

  for (idx = 0; idx < n_structs; idx++) {
    GstCapsFeatures *in_features = gst_caps_get_features (caps, idx);
    GstCapsFeatures *out_features = gst_caps_features_new (feature_name, NULL);
    guint count = gst_caps_features_get_size (in_features);
    guint k;

    for (k = 0; k < count; k++) {
      const gchar *f = gst_caps_features_get_nth (in_features, k);

      if (gst_caps_features_contains (out_features, f))
        continue;
      /* system memory is implicit; never forward it */
      if (g_strcmp0 (f, GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY) == 0)
        continue;
      if (passthrough && gst_caps_features_contains (passthrough, f))
        gst_caps_features_add (out_features, f);
    }

    /* takes ownership of out_features */
    gst_caps_set_features (result, idx, out_features);
  }

  return result;
}
/* Negotiate and set the decoder's output (src pad) caps.
 *
 * Picks the preferred caps feature/format from downstream, creates a
 * new output state at display size, tags the caps with the chosen
 * memory feature, prepares (possibly different) allocation caps for the
 * decoded (coded) size, and updates the decoder latency from the
 * framerate.  Returns TRUE on success, FALSE if negotiation is not
 * possible yet or the resulting state is invalid. */
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstPad *const srcpad = GST_VIDEO_DECODER_SRC_PAD (vdec);
  GstCaps *allowed;
  GstVideoCodecState *state, *ref_state;
  GstVaapiCapsFeature feature;
  GstCapsFeatures *features;
  GstCaps *allocation_caps;
  GstVideoInfo *vi;
  GstVideoFormat format;
  GstClockTime latency;
  gint fps_d, fps_n;
  guint width, height;
  const gchar *format_str, *feature_str;

  /* Nothing to negotiate against until upstream caps arrive */
  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  format = GST_VIDEO_INFO_FORMAT (&decode->decoded_info);
  allowed = gst_vaapidecode_get_allowed_srcpad_caps (decode);
  /* may update @format to what downstream prefers */
  feature = gst_vaapi_find_preferred_caps_feature (srcpad, allowed, &format);
  gst_caps_unref (allowed);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

#if (!USE_GLX && !USE_EGL)
  /* This is a very pathological situation. Should not happen. */
  if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
    return FALSE;
#endif

  /* Chosen output format differs from the decoded one: a driver-side
   * conversion would be needed; currently only logged */
  if ((feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY
          || feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
      && format != GST_VIDEO_INFO_FORMAT (&decode->decoded_info)) {
    GST_FIXME_OBJECT (decode, "validate if driver can convert from %s to %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
            (&decode->decoded_info)), gst_video_format_to_string (format));
  }

  /* Prefer the display (cropped) dimensions; fall back to the input caps */
  width = decode->display_width;
  height = decode->display_height;

  if (!width || !height) {
    width = GST_VIDEO_INFO_WIDTH (&ref_state->info);
    height = GST_VIDEO_INFO_HEIGHT (&ref_state->info);
  }

  state = gst_video_decoder_set_output_state (vdec, format, width, height,
      ref_state);
  if (!state)
    return FALSE;

  if (GST_VIDEO_INFO_WIDTH (&state->info) == 0
      || GST_VIDEO_INFO_HEIGHT (&state->info) == 0) {
    gst_video_codec_state_unref (state);
    return FALSE;
  }

  vi = &state->info;
  state->caps = gst_video_info_to_caps (vi);

  switch (feature) {
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:{
      GstStructure *structure = gst_caps_get_structure (state->caps, 0);

      /* Remove chroma-site and colorimetry from src caps,
       * which is unnecessary on downstream if using VASurface */
      gst_structure_remove_fields (structure, "chroma-site", "colorimetry",
          NULL);

      feature_str = gst_vaapi_caps_feature_to_string (feature);
      features = gst_caps_features_new (feature_str, NULL);
      /* takes ownership of @features */
      gst_caps_set_features (state->caps, 0, features);
      break;
    }
    default:
      break;
  }

  /* Allocation query is different from pad's caps */
  allocation_caps = NULL;
  if (GST_VIDEO_INFO_WIDTH (&decode->decoded_info) != width
      || GST_VIDEO_INFO_HEIGHT (&decode->decoded_info) != height) {
    /* allocation uses the decoded (coded) dimensions, not display size */
    allocation_caps = gst_caps_copy (state->caps);
    format_str = gst_video_format_to_string (format);
    gst_caps_set_simple (allocation_caps,
        "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (&decode->decoded_info),
        "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (&decode->decoded_info),
        "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (decode, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
  }
  /* gst_caps_replace() takes its own ref, so drop ours afterwards */
  gst_caps_replace (&state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  /* NOTE(review): @vi points into @state, which is unreffed here but
   * still read below for the framerate; presumably the decoder keeps
   * its own ref on the output state — verify */
  gst_video_codec_state_unref (state);

  fps_n = GST_VIDEO_INFO_FPS_N (vi);
  fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
static gboolean gst_vaapidecode_update_src_caps (GstVaapiDecode * decode) { GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode); GstVideoCodecState *state, *ref_state; GstVideoInfo *vi; GstVideoFormat format = GST_VIDEO_FORMAT_I420; if (!decode->input_state) return FALSE; ref_state = decode->input_state; GstCapsFeatures *features = NULL; GstVaapiCapsFeature feature; feature = gst_vaapi_find_preferred_caps_feature (GST_VIDEO_DECODER_SRC_PAD (vdec), GST_VIDEO_INFO_FORMAT (&ref_state->info), &format); if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED) return FALSE; switch (feature) { #if (USE_GLX || USE_EGL) case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META: features = gst_caps_features_new (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL); break; #endif #if GST_CHECK_VERSION(1,3,1) case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE: features = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE, NULL); break; #endif default: break; } state = gst_video_decoder_set_output_state (vdec, format, ref_state->info.width, ref_state->info.height, ref_state); if (!state || state->info.width == 0 || state->info.height == 0) return FALSE; vi = &state->info; state->caps = gst_video_info_to_caps (vi); if (features) gst_caps_set_features (state->caps, 0, features); GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps); gst_caps_replace (&decode->srcpad_caps, state->caps); gst_video_codec_state_unref (state); gint fps_n = GST_VIDEO_INFO_FPS_N (vi); gint fps_d = GST_VIDEO_INFO_FPS_D (vi); if (fps_n <= 0 || fps_d <= 0) { GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation"); fps_n = 25; fps_d = 1; } /* For parsing/preparation purposes we'd need at least 1 frame * latency in general, with perfectly known unit boundaries (NALU, * AU), and up to 2 frames when we need to wait for the second frame * start to determine the first frame is complete */ GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n); 
gst_video_decoder_set_latency (vdec, latency, latency); return TRUE; }
/* Build the caps describing every buffer format the EGL adaptation can
 * render, in three flavours: EGLImage-backed (non-iOS only),
 * GLTextureUploadMeta-backed, and plain system memory.  Returns NULL
 * when the EGL display does not support an RGBA8888 config. */
GstCaps *
gst_egl_adaptation_fill_supported_fbuffer_configs (GstEglAdaptationContext *
    ctx)
{
  /* same format order as the original hand-written append sequence */
  static const GstVideoFormat formats[] = {
    GST_VIDEO_FORMAT_RGBA, GST_VIDEO_FORMAT_BGRA, GST_VIDEO_FORMAT_ARGB,
    GST_VIDEO_FORMAT_ABGR, GST_VIDEO_FORMAT_RGBx, GST_VIDEO_FORMAT_BGRx,
    GST_VIDEO_FORMAT_xRGB, GST_VIDEO_FORMAT_xBGR, GST_VIDEO_FORMAT_AYUV,
    GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_RGB, GST_VIDEO_FORMAT_BGR,
    GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_NV12,
    GST_VIDEO_FORMAT_NV21, GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_Y41B,
    GST_VIDEO_FORMAT_RGB16
  };
  GstCaps *caps = NULL, *copy1, *copy2;
  guint i, n;

  GST_DEBUG_OBJECT (ctx->element,
      "Building initial list of wanted eglattribs per format");

  /* Init supported format/caps list */
  if (_gst_egl_choose_config (ctx, TRUE, NULL)) {
    caps = gst_caps_new_empty ();
    for (i = 0; i < G_N_ELEMENTS (formats); i++)
      gst_caps_append (caps, _gst_video_format_new_template_caps (formats[i]));

    copy1 = gst_caps_copy (caps);
    copy2 = gst_caps_copy (caps);

#ifndef HAVE_IOS
    /* first flavour: EGLImage memory */
    n = gst_caps_get_size (caps);
    for (i = 0; i < n; i++) {
      gst_caps_set_features (caps, i,
          gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_EGL_IMAGE, NULL));
    }
#endif

    /* second flavour: GLTextureUploadMeta */
    n = gst_caps_get_size (copy1);
    for (i = 0; i < n; i++) {
      gst_caps_set_features (copy1, i,
          gst_caps_features_new
          (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL));
    }

    /* third flavour (copy2) keeps default system-memory features */
    gst_caps_append (caps, copy1);
    gst_caps_append (caps, copy2);
  } else {
    GST_INFO_OBJECT (ctx->element,
        "EGL display doesn't support RGBA8888 config");
  }

  return caps;
}
/* Compute the caps for the opposite pad given @caps on @direction's pad.
 *
 * SRC direction: return the full set of allowed sinkpad caps.  SINK
 * direction: if @caps is not fixed yet, return all allowed srcpad
 * caps; otherwise derive the expected src caps from the fixated sink
 * caps, the downstream peer's preferences, and the user-specified
 * properties (size, format, framerate, deinterlacing).
 * Returns new caps (transfer full) or NULL on failure. */
static GstCaps *
gst_mfxpostproc_transform_caps_impl (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstMfxPostproc *const vpp = GST_MFXPOSTPROC (trans);
  GstVideoInfo vi, peer_vi;
  GstVideoFormat out_format;
  GstCaps *out_caps, *peer_caps;
  GstMfxCapsFeature feature;
  const gchar *feature_str;
  guint width, height;

  /* Generate the sink pad caps, that could be fixated afterwards */
  if (direction == GST_PAD_SRC) {
    if (!ensure_allowed_sinkpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_sinkpad_caps);
  }

  /* Generate complete set of src pad caps if non-fixated sink pad
   * caps are provided */
  if (!gst_caps_is_fixed (caps)) {
    if (!ensure_allowed_srcpad_caps (vpp))
      return NULL;
    return gst_caps_ref (vpp->allowed_srcpad_caps);
  }

  /* Generate the expected src pad caps, from the current fixated
   * sink pad caps */
  if (!gst_video_info_from_caps (&vi, caps))
    return NULL;

  if (vpp->deinterlace_mode)
    GST_VIDEO_INFO_INTERLACE_MODE (&vi) = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* Update size from user-specified parameters */
  find_best_size (vpp, &vi, &width, &height);

  /* Update format from user-specified parameters */
  peer_caps = gst_pad_peer_query_caps (GST_BASE_TRANSFORM_SRC_PAD (trans),
      vpp->allowed_srcpad_caps);

  if (gst_caps_is_any (peer_caps) || gst_caps_is_empty (peer_caps))
    return peer_caps;
  if (!gst_caps_is_fixed (peer_caps))
    peer_caps = gst_caps_fixate (peer_caps);

  gst_video_info_from_caps (&peer_vi, peer_caps);
  /* Fix: read the peer's pixel format — the original assigned the
   * framerate numerator (GST_VIDEO_INFO_FPS_N) to out_format, which is
   * a GstVideoFormat, yielding a bogus format value. */
  out_format = GST_VIDEO_INFO_FORMAT (&peer_vi);

  /* Update width and height from the caps */
  if (GST_VIDEO_INFO_HEIGHT (&peer_vi) != 1
      && GST_VIDEO_INFO_WIDTH (&peer_vi) != 1)
    find_best_size (vpp, &peer_vi, &width, &height);

  /* an explicitly requested format property overrides the peer's */
  if (vpp->format != DEFAULT_FORMAT)
    out_format = vpp->format;

  if (vpp->fps_n) {
    GST_VIDEO_INFO_FPS_N (&vi) = vpp->fps_n;
    GST_VIDEO_INFO_FPS_D (&vi) = vpp->fps_d;
    vpp->field_duration = gst_util_uint64_scale (GST_SECOND,
        vpp->fps_d, vpp->fps_n);
    if (DEFAULT_FRC_ALG == vpp->alg)
      vpp->alg = GST_MFX_FRC_PRESERVE_TIMESTAMP;
  }

  if (peer_caps)
    gst_caps_unref (peer_caps);

  feature =
      gst_mfx_find_preferred_caps_feature (GST_BASE_TRANSFORM_SRC_PAD (trans),
      &out_format);
  gst_video_info_change_format (&vi, out_format, width, height);

  out_caps = gst_video_info_to_caps (&vi);
  if (!out_caps)
    return NULL;

  if (feature) {
    feature_str = gst_mfx_caps_feature_to_string (feature);
    if (feature_str)
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_new (feature_str, NULL));
  }

  if (vpp->format != out_format)
    vpp->format = out_format;

  return out_caps;
}
/* Rebuild and (re)activate the stream window's buffer pool from the
 * currently configured geometry, format and usage flags.  Caller holds
 * the window lock (per the _locked suffix — TODO confirm).  On any
 * failure the new pool is destroyed and win->pool is reset to NULL. */
static void
gst_droidcamsrc_stream_window_reset_buffer_pool_locked (
    GstDroidCamSrcStreamWindow * win)
{
  GstStructure *config;
  GstCaps *caps;
  GstCapsFeatures *feature;

  GST_DEBUG ("stream window configure buffer pool");

  /* drop any previous pool before creating the replacement */
  if (win->pool) {
    /* we will ignore the error here */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (win->pool), FALSE)) {
      GST_WARNING ("Failed to deactivate buffer pool");
    }
    gst_object_unref (win->pool);
  }

  win->pool = gst_droid_cam_src_buffer_pool_new (win->info);

  /* all parameters must be known before the pool can be configured */
  if (!win->count || !win->width || !win->height || !win->usage
      || !win->format) {
    GST_ERROR ("incomplete configuration");
    goto clean_and_out;
  }

  config = gst_buffer_pool_get_config (GST_BUFFER_POOL (win->pool));
  if (!config) {
    GST_ERROR ("failed to get buffer pool config");
    goto clean_and_out;
  }

  /* TODO: 30 is hardcoded */
  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "ENCODED",
      "width", G_TYPE_INT, win->width,
      "height", G_TYPE_INT, win->height,
      "framerate", GST_TYPE_FRACTION, 30, 1, NULL);

  /* buffers are droid-handle backed, not plain system memory */
  feature = gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_DROID_HANDLE, NULL);
  gst_caps_set_features (caps, 0, feature);

  gst_buffer_pool_config_set_params (config, caps, 0, win->count, win->count);
  gst_buffer_pool_config_set_allocator (config, win->allocator, NULL);

  /* stash the droid-specific parameters in the pool config */
  gst_structure_set (config,
      GST_DROIDCAMSRC_BUFFER_POOL_USAGE_KEY, G_TYPE_INT, win->usage,
      GST_DROIDCAMSRC_BUFFER_POOL_WIDTH_KEY, G_TYPE_INT, win->width,
      GST_DROIDCAMSRC_BUFFER_POOL_HEIGHT_KEY, G_TYPE_INT, win->height,
      GST_DROIDCAMSRC_BUFFER_POOL_FORMAT_KEY, G_TYPE_INT, win->format, NULL);

  gst_caps_unref (caps);

  /* NOTE(review): set_config is assumed to take ownership of @config
   * per GStreamer convention, so no explicit free on failure — verify */
  if (!gst_buffer_pool_set_config (GST_BUFFER_POOL (win->pool), config)) {
    GST_ERROR ("failed to set buffer pool config");
    goto clean_and_out;
  }

  if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (win->pool), TRUE)) {
    GST_ERROR ("failed to activate buffer pool");
    goto clean_and_out;
  }

  win->needs_reconfigure = FALSE;

  return;

clean_and_out:
  if (win->pool) {
    gst_object_unref (win->pool);
    win->pool = NULL;
  }
}
/* Configure the decoder's output (src pad) caps from the negotiated
 * MSDK parameters.
 *
 * Uses the crop (display) size for the caps, aligns the video info to
 * MSDK requirements, advertises dmabuf memory when the src pad supports
 * it, and — when @need_allocation is TRUE — prepares allocation caps
 * carrying the (16/32-rounded) coded size.  Returns FALSE on failure. */
static gboolean
gst_msdkdec_set_src_caps (GstMsdkDec * thiz, gboolean need_allocation)
{
  GstVideoCodecState *output_state;
  GstVideoInfo *vinfo;
  GstVideoAlignment align;
  GstCaps *allocation_caps = NULL;
  GstVideoFormat format;
  guint width, height;
  const gchar *format_str;

  /* use display width and display height in output state which
   * will be using for caps negotiation */
  width =
      thiz->param.mfx.FrameInfo.CropW ? thiz->param.mfx.
      FrameInfo.CropW : GST_VIDEO_INFO_WIDTH (&thiz->input_state->info);
  height =
      thiz->param.mfx.FrameInfo.CropH ? thiz->param.mfx.
      FrameInfo.CropH : GST_VIDEO_INFO_HEIGHT (&thiz->input_state->info);

  format =
      gst_msdk_get_video_format_from_mfx_fourcc (thiz->param.mfx.
      FrameInfo.FourCC);
  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_WARNING_OBJECT (thiz, "Failed to find a valid video format\n");
    return FALSE;
  }

  output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (thiz),
      format, width, height, thiz->input_state);
  if (!output_state)
    return FALSE;

  /* Ensure output_state->caps and info has same width and height
   * Also mandate the 32 bit alignment */
  vinfo = &output_state->info;
  gst_msdk_set_video_alignment (vinfo, &align);
  gst_video_info_align (vinfo, &align);
  output_state->caps = gst_video_info_to_caps (vinfo);
  /* advertise dmabuf memory when the downstream peer can handle it */
  if (srcpad_can_dmabuf (thiz))
    gst_caps_set_features (output_state->caps, 0,
        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_DMABUF, NULL));
  thiz->output_info = output_state->info;

  if (need_allocation) {
    /* Find allocation width and height */
    width =
        GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width ? thiz->param.mfx.
        FrameInfo.Width : GST_VIDEO_INFO_WIDTH (&output_state->info));
    height =
        GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height ? thiz->param.mfx.
        FrameInfo.Height : GST_VIDEO_INFO_HEIGHT (&output_state->info));

    /* set allocation width and height in allocation_caps
     * which may or may not be similar to the output_state caps */
    allocation_caps = gst_caps_copy (output_state->caps);
    format_str =
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&thiz->output_info));
    gst_caps_set_simple (allocation_caps, "width", G_TYPE_INT, width, "height",
        G_TYPE_INT, height, "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (thiz, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
    /* remember them for future negotiations that reuse the pool */
    gst_caps_replace (&thiz->allocation_caps, allocation_caps);
  } else {
    /* We keep the allocation parameters as it is to avoid pool renegotiation.
     * For codecs like VP9, dynamic resolution change doesn't requires allocation
     * reset if the new video frame resolution is lower than the
     * already configured one */
    allocation_caps = gst_caps_copy (thiz->allocation_caps);
  }
  /* gst_caps_replace() adds its own ref; release ours afterwards */
  gst_caps_replace (&output_state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  gst_video_codec_state_unref (output_state);
  return TRUE;
}