static void print_caps (const GstCaps * caps, const gchar * pfx) { guint i; g_return_if_fail (caps != NULL); if (gst_caps_is_any (caps)) { n_print ("%sANY\n", pfx); return; } if (gst_caps_is_empty (caps)) { n_print ("%sEMPTY\n", pfx); return; } for (i = 0; i < gst_caps_get_size (caps); i++) { GstStructure *structure = gst_caps_get_structure (caps, i); GstCapsFeatures *features = gst_caps_get_features (caps, i); if (features && (gst_caps_features_is_any (features) || !gst_caps_features_is_equal (features, GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY))) { gchar *features_string = gst_caps_features_to_string (features); n_print ("%s%s(%s)\n", pfx, gst_structure_get_name (structure), features_string); g_free (features_string); } else { n_print ("%s%s\n", pfx, gst_structure_get_name (structure)); } gst_structure_foreach (structure, print_field, (gpointer) pfx); } }
/**
 * gst_dvbsub_overlay_intersect_by_feature:
 *
 * Creates a new #GstCaps based on the following filtering rule.
 *
 * For each individual caps contained in given caps, if the
 * caps uses the given caps feature, keep a version of the caps
 * with the feature and an another one without. Otherwise, intersect
 * the caps with the given filter.
 *
 * Returns: the new #GstCaps
 */
static GstCaps *
gst_dvbsub_overlay_intersect_by_feature (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  int i, caps_size;
  GstCaps *new_caps;

  new_caps = gst_caps_new_empty ();
  caps_size = gst_caps_get_size (caps);

  for (i = 0; i < caps_size; i++) {
    GstStructure *caps_structure = gst_caps_get_structure (caps, i);
    /* copied so it can be mutated below; ownership transfers to
     * @simple_caps via gst_caps_set_features() */
    GstCapsFeatures *caps_features =
        gst_caps_features_copy (gst_caps_get_features (caps, i));
    GstCaps *filtered_caps;
    /* single-structure caps holding a copy of the i-th structure */
    GstCaps *simple_caps =
        gst_caps_new_full (gst_structure_copy (caps_structure), NULL);

    gst_caps_set_features (simple_caps, 0, caps_features);

    if (gst_caps_features_contains (caps_features, feature)) {
      /* keep a variant WITH the feature... */
      gst_caps_append (new_caps, gst_caps_copy (simple_caps));
      /* ...then strip the feature for a second variant that skips the
       * filter (mutates the features now owned by @simple_caps) */
      gst_caps_features_remove (caps_features, feature);
      filtered_caps = gst_caps_ref (simple_caps);
    } else {
      /* caps without the feature are intersected with @filter */
      filtered_caps = gst_caps_intersect_full (simple_caps, filter,
          GST_CAPS_INTERSECT_FIRST);
    }

    gst_caps_unref (simple_caps);
    gst_caps_append (new_caps, filtered_caps);
  }

  return new_caps;
}
static GstCaps * gst_gl_mixer_set_caps_features (const GstCaps * caps, const gchar * feature_name) { GstCaps *tmp = gst_caps_copy (caps); guint n = gst_caps_get_size (tmp); guint i = 0; for (i = 0; i < n; i++) { GstCapsFeatures *features = gst_caps_get_features (tmp, i); if (features) { guint n_f = gst_caps_features_get_size (features); guint j = 0; for (j = 0; j < n_f; j++) { gst_caps_features_remove_id (features, gst_caps_features_get_nth_id (features, j)); } } gst_caps_features_add (features, feature_name); gst_caps_set_simple (tmp, "format", G_TYPE_STRING, "RGBA", NULL); } return tmp; }
/* Checks whether the supplied caps contain VA surfaces */ gboolean gst_caps_has_vaapi_surface (GstCaps * caps) { gboolean found_caps = FALSE; guint i, num_structures; g_return_val_if_fail (caps != NULL, FALSE); num_structures = gst_caps_get_size (caps); if (num_structures < 1) return FALSE; for (i = 0; i < num_structures && !found_caps; i++) { GstCapsFeatures *const features = gst_caps_get_features (caps, i); #if GST_CHECK_VERSION(1,3,0) /* Skip ANY features, we need an exact match for correct evaluation */ if (gst_caps_features_is_any (features)) continue; #endif found_caps = gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE); } return found_caps; }
/* Returns a copy of @caps with format-describing fields removed from all
 * system-memory structures, so the converter can negotiate any format. */
static GstCaps *
gst_video_convert_caps_remove_format_info (GstCaps * caps)
{
  GstCaps *out;
  gint idx, len;

  out = gst_caps_new_empty ();
  len = gst_caps_get_size (caps);

  for (idx = 0; idx < len; idx++) {
    GstStructure *st = gst_caps_get_structure (caps, idx);
    GstCapsFeatures *feats = gst_caps_get_features (caps, idx);
    gboolean plain_sysmem;

    /* skip structures already expressed by what we collected so far */
    if (idx > 0 && gst_caps_is_subset_structure_full (out, st, feats))
      continue;

    st = gst_structure_copy (st);

    /* only strip format info where we can actually convert, i.e. plain
     * system memory */
    plain_sysmem = !gst_caps_features_is_any (feats)
        && gst_caps_features_is_equal (feats,
            GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY);
    if (plain_sysmem) {
      gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
          NULL);
    }

    gst_caps_append_structure_full (out, st, gst_caps_features_copy (feats));
  }

  return out;
}
/* Transform caps for scaling: open up width/height (and PAR) to full
 * ranges on system-memory structures; other memory types pass through. */
static GstCaps *
gst_video_scale_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *result;
  gint idx, len;

  GST_DEBUG_OBJECT (trans,
      "Transforming caps %" GST_PTR_FORMAT " in direction %s", caps,
      (direction == GST_PAD_SINK) ? "sink" : "src");

  result = gst_caps_new_empty ();
  len = gst_caps_get_size (caps);

  for (idx = 0; idx < len; idx++) {
    GstStructure *st = gst_caps_get_structure (caps, idx);
    GstCapsFeatures *feats = gst_caps_get_features (caps, idx);

    /* skip structures already expressed by the collected caps */
    if (idx > 0 && gst_caps_is_subset_structure_full (result, st, feats))
      continue;

    st = gst_structure_copy (st);

    /* non-sysmem features can only be passed through unchanged */
    if (!gst_caps_features_is_any (feats)
        && gst_caps_features_is_equal (feats,
            GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)) {
      gst_structure_set (st, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);

      /* if pixel aspect ratio, make a range of it */
      if (gst_structure_has_field (st, "pixel-aspect-ratio")) {
        gst_structure_set (st, "pixel-aspect-ratio",
            GST_TYPE_FRACTION_RANGE, 1, G_MAXINT, G_MAXINT, 1, NULL);
      }
    }
    gst_caps_append_structure_full (result, st,
        gst_caps_features_copy (feats));
  }

  if (filter) {
    GstCaps *isect =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);

    gst_caps_unref (result);
    result = isect;
  }

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, result);

  return result;
}
/* Builds new caps where every structure's features become @feature_name
 * plus any original features listed in @passthrough.  Structures whose
 * original features are ANY produce two entries: one with only
 * @feature_name and one with @feature_name + all of @passthrough. */
static GstCaps *
_set_caps_features_with_passthrough (const GstCaps * caps,
    const gchar * feature_name, GstCapsFeatures * passthrough)
{
  guint i, j, m, n;
  GstCaps *tmp;

  tmp = gst_caps_new_empty ();

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    GstCapsFeatures *features, *orig_features;
    GstStructure *s = gst_caps_get_structure (caps, i);

    orig_features = gst_caps_get_features (caps, i);
    /* start from just the target feature */
    features = gst_caps_features_new (feature_name, NULL);

    if (gst_caps_features_is_any (orig_features)) {
      /* if we have any features, we add both the features with and without @passthrough */
      gst_caps_append_structure_full (tmp, gst_structure_copy (s),
          gst_caps_features_copy (features));

      /* extend @features with everything in @passthrough; the extended
       * set is appended once more below */
      m = gst_caps_features_get_size (passthrough);
      for (j = 0; j < m; j++) {
        const gchar *feature = gst_caps_features_get_nth (passthrough, j);

        /* if we already have the features */
        if (gst_caps_features_contains (features, feature))
          continue;

        gst_caps_features_add (features, feature);
      }
    } else {
      /* carry over only the original features that are allowed to pass
       * through, skipping plain system memory and duplicates */
      m = gst_caps_features_get_size (orig_features);
      for (j = 0; j < m; j++) {
        const gchar *feature = gst_caps_features_get_nth (orig_features, j);

        /* if we already have the features */
        if (gst_caps_features_contains (features, feature))
          continue;

        if (g_strcmp0 (feature, GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY) == 0)
          continue;

        if (gst_caps_features_contains (passthrough, feature)) {
          gst_caps_features_add (features, feature);
        }
      }
    }

    /* @features ownership transfers to @tmp here */
    gst_caps_append_structure_full (tmp, gst_structure_copy (s), features);
  }

  return tmp;
}
/* Returns TRUE if any structure of @caps explicitly lists @feature.
 * ANY feature sets are skipped since they are not an exact match. */
static gboolean
_gst_caps_has_feature (const GstCaps * caps, const gchar * feature)
{
  guint idx, len;

  len = gst_caps_get_size (caps);
  for (idx = 0; idx < len; idx++) {
    GstCapsFeatures *const feats = gst_caps_get_features (caps, idx);

    /* Skip ANY features, we need an exact match for correct evaluation */
    if (gst_caps_features_is_any (feats))
      continue;

    if (gst_caps_features_contains (feats, feature))
      return TRUE;
  }

  return FALSE;
}
/* Prepend to @caps a variant of every structure that additionally
 * advertises the overlay-composition meta feature; returns the merged
 * caps (gst_caps_merge() consumes both inputs). */
GstCaps *
gst_gl_overlay_compositor_add_caps (GstCaps * caps)
{
  GstCaps *with_meta = gst_caps_copy (caps);
  guint idx, len;

  len = gst_caps_get_size (with_meta);
  for (idx = 0; idx < len; idx++) {
    GstCapsFeatures *feats = gst_caps_get_features (with_meta, idx);

    gst_caps_features_add (feats,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* meta-capable variants first so they are preferred in negotiation */
  return gst_caps_merge (with_meta, caps);
}
/* Negotiates the output format for @vtdec against the peer's caps and
 * creates a matching decompression session.
 *
 * NOTE(review): gst_caps_intersect_full() may yield EMPTY caps when peer
 * and template do not intersect; gst_caps_get_structure (caps, 0) would
 * then return NULL — TODO confirm callers guarantee an intersection. */
static gboolean
gst_vtdec_negotiate_output_format (GstVtdec * vtdec,
    GstVideoCodecState * input_state)
{
  GstCaps *caps = NULL, *peercaps = NULL, *templcaps;
  GstVideoFormat output_format;
  GstVideoCodecState *output_state = NULL;
  GstCapsFeatures *features;
  GstStructure *structure;
  const gchar *s;

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL);

  /* Check if output supports GL caps by preference */
  templcaps = gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec));
  caps =
      gst_caps_intersect_full (templcaps, peercaps, GST_CAPS_INTERSECT_FIRST);

  gst_caps_unref (peercaps);
  gst_caps_unref (templcaps);

  /* keep only the first (most preferred) structure */
  caps = gst_caps_truncate (caps);
  structure = gst_caps_get_structure (caps, 0);
  s = gst_structure_get_string (structure, "format");
  output_format = gst_video_format_from_string (s);
  /* copied because @caps is released before the features are attached */
  features = gst_caps_features_copy (gst_caps_get_features (caps, 0));

  gst_caps_unref (caps);

  if (!gst_vtdec_create_session (vtdec, output_format)) {
    gst_caps_features_free (features);
    return FALSE;
  }

  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
      output_format, vtdec->video_info.width, vtdec->video_info.height,
      input_state);
  output_state->caps = gst_video_info_to_caps (&output_state->info);
  /* ownership of @features transfers to the output caps */
  gst_caps_set_features (output_state->caps, 0, features);

  return TRUE;
}
/***********************************************************
 * Name: init_textures
 *
 * Arguments:
 *       APP_STATE_T *state - holds OGLES model info
 *       GstBuffer *buffer  - buffer the texture setup is based on
 *
 * Description:   Initialise OGL|ES texture surfaces to use image
 *                buffers.  The upload path is selected from the caps
 *                features of the negotiated caps in state->caps.
 *
 * Returns: void
 *
 ***********************************************************/
static void
init_textures (APP_STATE_T * state, GstBuffer * buffer)
{
  GstCapsFeatures *feature = gst_caps_get_features (state->caps, 0);

  if (gst_caps_features_contains (feature, "memory:EGLImage")) {
    /* nothing special to do */
    g_print ("Prepare texture for EGLImage\n");
    state->can_avoid_upload = FALSE;
    glGenTextures (1, &state->tex);
    glBindTexture (GL_TEXTURE_2D, state->tex);
  } else if (gst_caps_features_contains (feature, "memory:GLMemory")) {
    /* buffers already carry a GL texture, no local texture needed */
    g_print ("Prepare texture for GLMemory\n");
    state->can_avoid_upload = TRUE;
    state->tex = 0;
  } else if (gst_caps_features_contains (feature,
          "meta:GstVideoGLTextureUploadMeta")) {
    GstVideoMeta *meta = NULL;

    g_print ("Prepare texture for GstVideoGLTextureUploadMeta\n");
    /* allocate texture storage sized from the buffer's video meta */
    meta = gst_buffer_get_video_meta (buffer);
    state->can_avoid_upload = FALSE;
    glGenTextures (1, &state->tex);
    glBindTexture (GL_TEXTURE_2D, state->tex);
    glTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA8, meta->width, meta->height, 0,
        GL_RGBA, GL_UNSIGNED_BYTE, NULL);
  } else {
    g_assert_not_reached ();
  }

#if 0
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
#else
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
#endif

  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  assert (glGetError () == GL_NO_ERROR);
}
/*
 * Takes caps and copies its video fields to tmpl_caps
 */
static GstCaps *
__gst_video_element_proxy_caps (GstElement * element, GstCaps * templ_caps,
    GstCaps * caps)
{
  /* video fields that are proxied verbatim, in this order */
  static const gchar *proxy_fields[] = { "width", "height", "framerate",
    "pixel-aspect-ratio", "colorimetry", "chroma-site"
  };
  GstCaps *result = gst_caps_new_empty ();
  gint i, j;
  guint k;
  gint templ_caps_size = gst_caps_get_size (templ_caps);
  gint caps_size = gst_caps_get_size (caps);

  for (i = 0; i < templ_caps_size; i++) {
    GQuark q_name =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));
    GstCapsFeatures *features = gst_caps_get_features (templ_caps, i);

    for (j = 0; j < caps_size; j++) {
      const GstStructure *caps_s = gst_caps_get_structure (caps, j);
      GstStructure *s = gst_structure_new_id_empty (q_name);
      GstCaps *tmp = gst_caps_new_empty ();

      for (k = 0; k < G_N_ELEMENTS (proxy_fields); k++) {
        const GValue *val = gst_structure_get_value (caps_s, proxy_fields[k]);

        if (val)
          gst_structure_set_value (s, proxy_fields[k], val);
      }

      gst_caps_append_structure_full (tmp, s,
          gst_caps_features_copy (features));
      result = gst_caps_merge (result, tmp);
    }
  }

  return result;
}
/**
 * gst_dvbsub_overlay_add_feature_and_intersect:
 *
 * Creates a new #GstCaps containing the (given caps +
 * given caps feature) + (given caps intersected by the
 * given filter).
 *
 * Returns: the new #GstCaps
 */
static GstCaps *
gst_dvbsub_overlay_add_feature_and_intersect (GstCaps * caps,
    const gchar * feature, GstCaps * filter)
{
  GstCaps *result;
  guint idx, len;

  result = gst_caps_copy (caps);

  /* add @feature to every concrete (non-ANY) structure of the copy */
  len = gst_caps_get_size (result);
  for (idx = 0; idx < len; idx++) {
    GstCapsFeatures *feats = gst_caps_get_features (result, idx);

    if (!gst_caps_features_is_any (feats))
      gst_caps_features_add (feats, feature);
  }

  /* then append the plain caps intersected with the filter */
  gst_caps_append (result,
      gst_caps_intersect_full (caps, filter, GST_CAPS_INTERSECT_FIRST));

  return result;
}
/* Returns a copy of @caps where each structure's features are replaced
 * with @feature_name plus any original feature that is listed in
 * @passthrough (system memory and duplicates excluded). */
static GstCaps *
_set_caps_features_with_passthrough (const GstCaps * caps,
    const gchar * feature_name, GstCapsFeatures * passthrough)
{
  GstCaps *result = gst_caps_copy (caps);
  guint idx, len;

  len = gst_caps_get_size (caps);
  for (idx = 0; idx < len; idx++) {
    GstCapsFeatures *old_feats = gst_caps_get_features (caps, idx);
    GstCapsFeatures *new_feats = gst_caps_features_new (feature_name, NULL);
    guint j, n_old = gst_caps_features_get_size (old_feats);

    for (j = 0; j < n_old; j++) {
      const gchar *feat = gst_caps_features_get_nth (old_feats, j);

      /* already present in the new set */
      if (gst_caps_features_contains (new_feats, feat))
        continue;
      /* plain system memory is never carried over */
      if (g_strcmp0 (feat, GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY) == 0)
        continue;
      if (passthrough != NULL
          && gst_caps_features_contains (passthrough, feat))
        gst_caps_features_add (new_feats, feat);
    }

    /* ownership of @new_feats transfers to @result */
    gst_caps_set_features (result, idx, new_feats);
  }

  return result;
}
/*
 * Takes caps and copies its audio fields to tmpl_caps
 */
static GstCaps *
__gst_audio_element_proxy_caps (GstElement * element, GstCaps * templ_caps,
    GstCaps * caps)
{
  GstCaps *result = gst_caps_new_empty ();
  gint i, j;
  gint templ_caps_size = gst_caps_get_size (templ_caps);
  gint caps_size = gst_caps_get_size (caps);

  for (i = 0; i < templ_caps_size; i++) {
    GQuark q_name =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));
    GstCapsFeatures *features = gst_caps_get_features (templ_caps, i);

    for (j = 0; j < caps_size; j++) {
      const GstStructure *caps_s = gst_caps_get_structure (caps, j);
      const GValue *val;
      GstStructure *s;
      GstCaps *tmp = gst_caps_new_empty ();

      s = gst_structure_new_id_empty (q_name);
      if ((val = gst_structure_get_value (caps_s, "rate")))
        gst_structure_set_value (s, "rate", val);
      if ((val = gst_structure_get_value (caps_s, "channels")))
        gst_structure_set_value (s, "channels", val);
      /* FIX: the raw audio caps field is named "channel-mask", not
       * "channels-mask"; the old name never matched, so the channel
       * mask was silently dropped from the proxied caps. */
      if ((val = gst_structure_get_value (caps_s, "channel-mask")))
        gst_structure_set_value (s, "channel-mask", val);
      gst_caps_append_structure_full (tmp, s,
          gst_caps_features_copy (features));
      result = gst_caps_merge (result, tmp);
    }
  }

  return result;
}
GstCaps * gst_gl_caps_replace_all_caps_features (const GstCaps * caps, const gchar * feature_name) { GstCaps *tmp = gst_caps_copy (caps); guint n = gst_caps_get_size (tmp); guint i = 0; for (i = 0; i < n; i++) { GstCapsFeatures *features = gst_caps_get_features (tmp, i); if (features) { guint n_f = gst_caps_features_get_size (features); guint j = 0; for (j = 0; j < n_f; j++) { gst_caps_features_remove_id (features, gst_caps_features_get_nth_id (features, j)); } } gst_caps_features_add (features, feature_name); } return tmp; }
/* Invokes @func for each (features, structure) pair of @caps, stopping
 * early and returning FALSE as soon as @func returns FALSE. */
gboolean _owr_gst_caps_foreach(const GstCaps *caps, OwrGstCapsForeachFunc func, gpointer user_data)
{
    guint idx, len;

    g_return_val_if_fail(GST_IS_CAPS(caps), FALSE);
    g_return_val_if_fail(func != NULL, FALSE);

    len = gst_caps_get_size(caps);
    for (idx = 0; idx < len; idx++) {
        GstCapsFeatures *feats = gst_caps_get_features(caps, idx);
        GstStructure *st = gst_caps_get_structure(caps, idx);

        /* entries missing features or structure are skipped, not failed */
        if (!feats || !st)
            continue;
        if (G_UNLIKELY(!func(feats, st, user_data)))
            return FALSE;
    }

    return TRUE;
}
/* Determines the preferred caps feature (VA surface, dmabuf, GL upload
 * meta or system memory) for @pad by intersecting @allowed_caps with the
 * peer's caps, and optionally resolves the preferred video format into
 * *out_format_ptr.  All intermediate caps are released via the goto
 * cleanup path. */
GstVaapiCapsFeature
gst_vaapi_find_preferred_caps_feature (GstPad * pad, GstCaps * allowed_caps,
    GstVideoFormat * out_format_ptr)
{
  GstVaapiCapsFeature feature = GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED;
  guint i, j, num_structures;
  GstCaps *peer_caps, *out_caps = NULL, *caps = NULL;
  static const guint feature_list[] = { GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE,
    GST_VAAPI_CAPS_FEATURE_DMABUF,
    GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META,
    GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY,
  };

  /* query with no filter */
  peer_caps = gst_pad_peer_query_caps (pad, NULL);
  if (!peer_caps)
    goto cleanup;
  if (gst_caps_is_empty (peer_caps))
    goto cleanup;

  /* filter against our allowed caps */
  out_caps = gst_caps_intersect_full (allowed_caps, peer_caps,
      GST_CAPS_INTERSECT_FIRST);

  /* default feature */
  feature = GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY;

  /* if downstream requests caps ANY, system memory is preferred */
  if (gst_caps_is_any (peer_caps))
    goto find_format;

  num_structures = gst_caps_get_size (out_caps);
  for (i = 0; i < num_structures; i++) {
    GstCapsFeatures *const features = gst_caps_get_features (out_caps, i);
    GstStructure *const structure = gst_caps_get_structure (out_caps, i);

    /* Skip ANY features, we need an exact match for correct evaluation */
    if (gst_caps_features_is_any (features))
      continue;

    /* build a single-structure caps for the feature check below */
    gst_caps_replace (&caps, NULL);
    caps = gst_caps_new_full (gst_structure_copy (structure), NULL);
    if (!caps)
      continue;
    gst_caps_set_features (caps, 0, gst_caps_features_copy (features));

    /* upgrade @feature to the highest-ranked matching entry */
    for (j = 0; j < G_N_ELEMENTS (feature_list); j++) {
      if (gst_vaapi_caps_feature_contains (caps, feature_list[j])
          && feature < feature_list[j]) {
        feature = feature_list[j];
        break;
      }
    }

    /* Stop at the first match, the caps should already be sorted out
     * by preference order from downstream elements */
    if (feature != GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY)
      break;
  }

  if (!caps)
    goto cleanup;

find_format:
  if (out_format_ptr) {
    GstVideoFormat out_format;
    GstStructure *structure = NULL;
    const GValue *format_list;
    GstCapsFeatures *features;

    /* if the best feature is SystemMemory, we should choose the
     * video/x-raw caps in the filtered peer caps set. If not, use
     * the first caps, which is the preferred by downstream. */
    if (feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY) {
      gst_caps_replace (&caps, out_caps);
      num_structures = gst_caps_get_size (caps);
      for (i = 0; i < num_structures; i++) {
        structure = gst_caps_get_structure (caps, i);
        features = gst_caps_get_features (caps, i);
        if (!gst_caps_features_is_any (features)
            && gst_caps_features_contains (features,
                gst_vaapi_caps_feature_to_string
                (GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY)))
          break;
      }
    } else {
      structure = gst_caps_get_structure (caps, 0);
    }
    if (!structure)
      goto cleanup;
    format_list = gst_structure_get_value (structure, "format");
    if (!format_list)
      goto cleanup;
    out_format = gst_vaapi_find_preferred_format (format_list, *out_format_ptr);
    if (out_format == GST_VIDEO_FORMAT_UNKNOWN)
      goto cleanup;

    *out_format_ptr = out_format;
  }

cleanup:
  gst_caps_replace (&caps, NULL);
  gst_caps_replace (&out_caps, NULL);
  gst_caps_replace (&peer_caps, NULL);
  return feature;
}
/* Configures the VPP element for new input/output caps: records the new
 * video infos, decides whether a VPP pass is needed, and (re)creates the
 * sink/src buffer pools when the configuration changed. */
static gboolean
gst_msdkvpp_set_caps (GstBaseTransform * trans, GstCaps * caps,
    GstCaps * out_caps)
{
  GstMsdkVPP *thiz = GST_MSDKVPP (trans);
  GstVideoInfo in_info, out_info;
  gboolean sinkpad_info_changed = FALSE;
  gboolean srcpad_info_changed = FALSE;
  gboolean deinterlace;

  /* FIX: compare the caps features by value.  The previous code compared
   * the GstCapsFeatures pointers of two distinct caps with "!=", which is
   * virtually always true, so need_vpp was set even when both sides used
   * the same memory features. */
  if (!gst_caps_features_is_equal (gst_caps_get_features (caps, 0),
          gst_caps_get_features (out_caps, 0)))
    thiz->need_vpp = 1;

  gst_video_info_from_caps (&in_info, caps);
  gst_video_info_from_caps (&out_info, out_caps);

  if (!gst_video_info_is_equal (&in_info, &thiz->sinkpad_info))
    sinkpad_info_changed = TRUE;
  if (!gst_video_info_is_equal (&out_info, &thiz->srcpad_info))
    srcpad_info_changed = TRUE;

  thiz->sinkpad_info = in_info;
  thiz->srcpad_info = out_info;
#ifndef _WIN32
  thiz->use_video_memory = TRUE;
#else
  thiz->use_video_memory = FALSE;
#endif

  /* nothing changed and already initialized: keep current setup */
  if (!sinkpad_info_changed && !srcpad_info_changed && thiz->initialized)
    return TRUE;

  /* check for deinterlace requirement */
  deinterlace = gst_msdkvpp_is_deinterlace_enabled (thiz, &in_info);
  if (deinterlace)
    thiz->flags |= GST_MSDK_FLAG_DEINTERLACE;

  /* one frame duration derived from the output framerate, 0 if unknown */
  thiz->buffer_duration = GST_VIDEO_INFO_FPS_N (&out_info) > 0 ?
      gst_util_uint64_scale (GST_SECOND, GST_VIDEO_INFO_FPS_D (&out_info),
      GST_VIDEO_INFO_FPS_N (&out_info)) : 0;

  if (!gst_msdkvpp_initialize (thiz))
    return FALSE;

  /* set passthrough according to filter operation change */
  gst_msdkvpp_set_passthrough (thiz);

  /* Ensure sinkpad buffer pool */
  thiz->sinkpad_buffer_pool =
      gst_msdkvpp_create_buffer_pool (thiz, GST_PAD_SINK, caps,
      thiz->in_num_surfaces);
  if (!thiz->sinkpad_buffer_pool) {
    GST_ERROR_OBJECT (thiz, "Failed to ensure the sinkpad buffer pool");
    return FALSE;
  }
  /* Ensure a srcpad buffer pool */
  thiz->srcpad_buffer_pool =
      gst_msdkvpp_create_buffer_pool (thiz, GST_PAD_SRC, out_caps,
      thiz->out_num_surfaces);
  if (!thiz->srcpad_buffer_pool) {
    GST_ERROR_OBJECT (thiz, "Failed to ensure the srcpad buffer pool");
    return FALSE;
  }

  return TRUE;
}
/* Decide-allocation vfunc: after chaining up, if the negotiated caps use
 * GL memory, fetches the downstream GL context and (re)creates the
 * CoreVideo texture cache for direct texture output. */
static gboolean
gst_vtdec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  gboolean ret;
  GstCaps *caps;
  GstCapsFeatures *features;
  GstVtdec *vtdec = GST_VTDEC (decoder);

  /* chain up first so the base class fills in the query defaults */
  ret =
      GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->decide_allocation
      (decoder, query);
  if (!ret)
    goto out;

  gst_query_parse_allocation (query, &caps, NULL);
  if (caps) {
    GstGLContext *gl_context = NULL;

    features = gst_caps_get_features (caps, 0);

    if (gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
      GstContext *context = NULL;
      /* NOTE(review): this inner @query deliberately shadows the function
       * parameter; it is a separate context query to the peer */
      GstQuery *query = gst_query_new_context ("gst.gl.local_context");

      if (gst_pad_peer_query (GST_VIDEO_DECODER_SRC_PAD (decoder), query)) {
        gst_query_parse_context (query, &context);
        if (context) {
          const GstStructure *s = gst_context_get_structure (context);
          gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &gl_context,
              NULL);
        }
      }
      /* NOTE(review): @context is used below after the query is unreffed —
       * presumably its lifetime outlives the query; verify against the
       * GstContext ownership rules */
      gst_query_unref (query);

      if (context) {
        GstVideoFormat internal_format;
        GstVideoCodecState *output_state =
            gst_video_decoder_get_output_state (decoder);

        GST_INFO_OBJECT (decoder, "pushing textures. GL context %p", context);
        if (vtdec->texture_cache)
          gst_core_video_texture_cache_free (vtdec->texture_cache);

        /* the CoreVideo-backed texture format differs per platform */
#ifdef HAVE_IOS
        internal_format = GST_VIDEO_FORMAT_NV12;
#else
        internal_format = GST_VIDEO_FORMAT_UYVY;
#endif
        vtdec->texture_cache = gst_core_video_texture_cache_new (gl_context);
        gst_core_video_texture_cache_set_format (vtdec->texture_cache,
            internal_format, output_state->caps);
        gst_video_codec_state_unref (output_state);
        gst_object_unref (gl_context);
      } else {
        GST_WARNING_OBJECT (decoder,
            "got memory:GLMemory caps but not GL context from downstream element");
      }
    }
  }

out:
  return ret;
}
/* Based on gstbasetextoverlay.c */
/* Negotiates output caps for the overlay element: decides whether the
 * overlay composition is attached as a buffer meta (when upstream or
 * downstream supports it) or blended into the frames, and emits the
 * caps-changed signal with the negotiated render window size. */
static gboolean
gst_overlay_composition_negotiate (GstOverlayComposition * self,
    GstCaps * caps)
{
  gboolean upstream_has_meta = FALSE;
  gboolean caps_has_meta = FALSE;
  gboolean alloc_has_meta = FALSE;
  gboolean attach = FALSE;
  gboolean ret = TRUE;
  guint width, height;
  GstCapsFeatures *f;
  GstCaps *overlay_caps;
  GstQuery *query;
  guint alloc_index;

  GST_DEBUG_OBJECT (self, "performing negotiation");

  /* Clear any pending reconfigure to avoid negotiating twice */
  gst_pad_check_reconfigure (self->srcpad);

  self->window_width = self->window_height = 0;

  if (!caps)
    caps = gst_pad_get_current_caps (self->sinkpad);
  else
    gst_caps_ref (caps);

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Check if upstream caps have meta */
  if ((f = gst_caps_get_features (caps, 0))) {
    upstream_has_meta = gst_caps_features_contains (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* Initialize dimensions */
  width = self->info.width;
  height = self->info.height;

  if (upstream_has_meta) {
    overlay_caps = gst_caps_ref (caps);
  } else {
    GstCaps *peercaps;

    /* BaseTransform requires caps for the allocation query to work */
    overlay_caps = gst_caps_copy (caps);
    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* Then check if downstream accept overlay composition in caps */
    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (self->srcpad, overlay_caps);
    caps_has_meta = !gst_caps_is_empty (peercaps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (self, "caps have overlay meta %d", caps_has_meta);
  }

  if (upstream_has_meta || caps_has_meta) {
    /* Send caps immediatly, it's needed by GstBaseTransform to get a reply
     * from allocation query */
    ret = gst_pad_set_caps (self->srcpad, overlay_caps);

    /* First check if the allocation meta has compositon */
    query = gst_query_new_allocation (overlay_caps, FALSE);

    if (!gst_pad_peer_query (self->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (self, "ALLOCATION query failed");

      /* In case we were flushing, mark reconfigure and fail this method,
       * will make it retry */
      if (GST_PAD_IS_FLUSHING (self->srcpad))
        ret = FALSE;
    }

    alloc_has_meta = gst_query_find_allocation_meta (query,
        GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, &alloc_index);

    GST_DEBUG_OBJECT (self, "sink alloc has overlay meta %d", alloc_has_meta);

    if (alloc_has_meta) {
      const GstStructure *params;

      /* FIX: restored the "&params" argument, which had been mangled into
       * a pilcrow character ("¶ms") and did not compile */
      gst_query_parse_nth_allocation_meta (query, alloc_index, &params);
      if (params) {
        if (gst_structure_get (params, "width", G_TYPE_UINT, &width,
                "height", G_TYPE_UINT, &height, NULL)) {
          GST_DEBUG_OBJECT (self, "received window size: %dx%d", width,
              height);
          g_assert (width != 0 && height != 0);
        }
      }
    }

    gst_query_unref (query);
  }

  /* Update render size if needed */
  self->window_width = width;
  self->window_height = height;

  /* For backward compatbility, we will prefer bliting if downstream
   * allocation does not support the meta. In other case we will prefer
   * attaching, and will fail the negotiation in the unlikely case we are
   * force to blit, but format isn't supported.
   */
  if (upstream_has_meta) {
    attach = TRUE;
  } else if (caps_has_meta) {
    if (alloc_has_meta) {
      attach = TRUE;
    } else {
      /* Don't attach unless we cannot handle the format */
      attach = !can_blend_caps (caps);
    }
  } else {
    ret = can_blend_caps (caps);
  }

  /* If we attach, then pick the overlay caps */
  if (attach) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, overlay_caps);
    /* Caps where already sent */
  } else if (ret) {
    GST_DEBUG_OBJECT (self, "Using caps %" GST_PTR_FORMAT, caps);
    ret = gst_pad_set_caps (self->srcpad, caps);
  }

  self->attach_compo_to_buffer = attach;

  if (!ret) {
    GST_DEBUG_OBJECT (self, "negotiation failed, schedule reconfigure");
    gst_pad_mark_reconfigure (self->srcpad);
  }

  g_signal_emit (self, overlay_composition_signals[SIGNAL_CAPS_CHANGED], 0,
      caps, self->window_width, self->window_height, NULL);

  gst_caps_unref (overlay_caps);
  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    gst_pad_mark_reconfigure (self->srcpad);
    return FALSE;
  }
}
/* Negotiate vfunc: picks output caps from the peer (keeping the current
 * caps when still compatible, to avoid recreating the VT session),
 * configures the output state, and manages the decompression session and
 * GL texture cache. */
static gboolean
gst_vtdec_negotiate (GstVideoDecoder * decoder)
{
  GstVideoCodecState *output_state = NULL;
  GstCaps *peercaps = NULL, *caps = NULL, *templcaps = NULL, *prevcaps = NULL;
  GstVideoFormat format;
  GstStructure *structure;
  const gchar *s;
  GstVtdec *vtdec;
  OSStatus err = noErr;
  GstCapsFeatures *features = NULL;
  /* FIX: must be initialized — when the negotiated caps carry no
   * features, the old code read this flag uninitialized further down
   * (undefined behaviour) */
  gboolean output_textures = FALSE;

  vtdec = GST_VTDEC (decoder);
  if (vtdec->session)
    gst_vtdec_push_frames_if_needed (vtdec, TRUE, FALSE);

  output_state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (vtdec));
  if (output_state) {
    prevcaps = gst_caps_ref (output_state->caps);
    gst_video_codec_state_unref (output_state);
  }

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL);
  if (prevcaps && gst_caps_can_intersect (prevcaps, peercaps)) {
    /* The hardware decoder can become (temporarily) unavailable across
     * VTDecompressionSessionCreate/Destroy calls. So if the currently configured
     * caps are still accepted by downstream we keep them so we don't have to
     * destroy and recreate the session.
     */
    GST_INFO_OBJECT (vtdec,
        "current and peer caps are compatible, keeping current caps");
    caps = gst_caps_ref (prevcaps);
  } else {
    templcaps =
        gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (decoder));
    caps =
        gst_caps_intersect_full (peercaps, templcaps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (templcaps);
  }
  gst_caps_unref (peercaps);

  /* reduce to the single preferred structure and extract its format */
  caps = gst_caps_truncate (gst_caps_make_writable (caps));
  structure = gst_caps_get_structure (caps, 0);
  s = gst_structure_get_string (structure, "format");
  format = gst_video_format_from_string (s);
  features = gst_caps_get_features (caps, 0);
  if (features)
    features = gst_caps_features_copy (features);

  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
      format, vtdec->video_info.width, vtdec->video_info.height,
      vtdec->input_state);
  output_state->caps = gst_video_info_to_caps (&output_state->info);
  if (features) {
    gst_caps_set_features (output_state->caps, 0, features);
    output_textures =
        gst_caps_features_contains (features,
        GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
    if (output_textures)
      gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
          GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
          GST_GL_TEXTURE_TARGET_2D_STR,
#endif
          NULL);
  }
  gst_caps_unref (caps);

  if (!prevcaps || !gst_caps_is_equal (prevcaps, output_state->caps)) {
    gboolean renegotiating = vtdec->session != NULL;

    GST_INFO_OBJECT (vtdec,
        "negotiated output format %" GST_PTR_FORMAT " previous %"
        GST_PTR_FORMAT, output_state->caps, prevcaps);

    if (vtdec->session)
      gst_vtdec_invalidate_session (vtdec);

    err = gst_vtdec_create_session (vtdec, format, TRUE);
    if (err == noErr) {
      GST_INFO_OBJECT (vtdec, "using hardware decoder");
    } else if (err == kVTVideoDecoderNotAvailableNowErr && renegotiating) {
      /* fall back to software decoding when re-negotiating */
      GST_WARNING_OBJECT (vtdec, "hw decoder not available anymore");
      err = gst_vtdec_create_session (vtdec, format, FALSE);
    }

    if (err != noErr) {
      GST_ELEMENT_ERROR (vtdec, RESOURCE, FAILED, (NULL),
          ("VTDecompressionSessionCreate returned %d", (int) err));
    }
  }

  if (vtdec->texture_cache != NULL && !output_textures) {
    gst_video_texture_cache_free (vtdec->texture_cache);
    vtdec->texture_cache = NULL;
  }

  if (err == noErr && output_textures) {
    /* call this regardless of whether caps have changed or not since a new
     * local context could have become available */
    gst_gl_context_helper_ensure_context (vtdec->ctxh);

    GST_INFO_OBJECT (vtdec, "pushing textures, context %p old context %p",
        vtdec->ctxh->context,
        vtdec->texture_cache ? vtdec->texture_cache->ctx : NULL);

    if (vtdec->texture_cache
        && vtdec->texture_cache->ctx != vtdec->ctxh->context) {
      gst_video_texture_cache_free (vtdec->texture_cache);
      vtdec->texture_cache = NULL;
    }
    if (!vtdec->texture_cache)
      setup_texture_cache (vtdec, vtdec->ctxh->context);
  }

  if (prevcaps)
    gst_caps_unref (prevcaps);

  if (err != noErr)
    return FALSE;

  return GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->negotiate (decoder);
}
/* Transform caps for videorate: computes what the opposite pad can produce
 * or accept given @caps on @direction's pad.  The framerate field is the only
 * thing rewritten; all other fields (and the caps features) are passed
 * through per-structure.  Returns a new caps ref; @caps is not consumed. */
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstCaps *ret;
  GstStructure *s, *s1, *s2, *s3 = NULL;
  /* max_rate may be changed from another thread via the property, hence the
   * atomic read */
  int maxrate = g_atomic_int_get (&videorate->max_rate);
  gint i;

  ret = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    s = gst_caps_get_structure (caps, i);
    /* s1: clamped/fixated variant; s2: ranged variant; s3: optional
     * "unknown framerate" (0/1) variant.  Ownership of each is transferred
     * to @ret by gst_caps_merge_structure(_full) below. */
    s1 = gst_structure_copy (s);

    if (videorate->updating_caps && direction == GST_PAD_SINK) {
      /* While a caps update is in flight, only pin the sink side to the
       * already-negotiated output rate instead of opening up ranges */
      GST_INFO_OBJECT (trans,
          "Only updating caps %" GST_PTR_FORMAT " with framerate" " %d/%d",
          caps, videorate->to_rate_numerator, videorate->to_rate_denominator);

      gst_structure_set (s1, "framerate", GST_TYPE_FRACTION,
          videorate->to_rate_numerator, videorate->to_rate_denominator, NULL);
      ret = gst_caps_merge_structure (ret, s1);
      continue;
    }

    s2 = gst_structure_copy (s);
    s3 = NULL;

    if (videorate->drop_only) {
      gint min_num = 0, min_denom = 1;
      gint max_num = G_MAXINT, max_denom = 1;

      /* Clamp the caps to our maximum rate as the first caps if possible */
      if (!gst_video_max_rate_clamp_structure (s1, maxrate,
              &min_num, &min_denom, &max_num, &max_denom)) {
        min_num = 0;
        min_denom = 1;
        max_num = maxrate;
        max_denom = 1;

        /* clamp wouldn't be a real subset of 1..maxrate, in this case the sink
         * caps should become [1..maxrate], [1..maxint] and the src caps just
         * [1..maxrate].  In case there was a caps incompatibility things will
         * explode later as appropriate :)
         *
         * In case [X..maxrate] == [X..maxint], skip as we'll set it later
         */
        if (direction == GST_PAD_SRC && maxrate != G_MAXINT)
          gst_structure_set (s1, "framerate", GST_TYPE_FRACTION_RANGE,
              min_num, min_denom, maxrate, 1, NULL);
        else {
          /* s1 would duplicate s2; drop it so it isn't merged below */
          gst_structure_free (s1);
          s1 = NULL;
        }
      }

      if (direction == GST_PAD_SRC) {
        /* We can accept anything as long as it's at least the minimal
         * framerate the sink needs */
        gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
            min_num, min_denom, G_MAXINT, 1, NULL);

        /* Also allow unknown framerate, if it isn't already */
        if (min_num != 0 || min_denom != 1) {
          s3 = gst_structure_copy (s);
          gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
        }
      } else if (max_num != 0 || max_denom != 1) {
        /* We can provide everything upto the maximum framerate at the src */
        gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
            0, 1, max_num, max_denom, NULL);
      }
    } else if (direction == GST_PAD_SINK) {
      gint min_num = 0, min_denom = 1;
      gint max_num = G_MAXINT, max_denom = 1;

      if (!gst_video_max_rate_clamp_structure (s1, maxrate,
              &min_num, &min_denom, &max_num, &max_denom)) {
        gst_structure_free (s1);
        s1 = NULL;
      }
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
          maxrate, 1, NULL);
    } else {
      /* set the framerate as a range */
      gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
          G_MAXINT, 1, NULL);
    }

    /* Merge the variants in preference order (clamped first), each carrying
     * a copy of the original structure's caps features */
    if (s1 != NULL)
      ret = gst_caps_merge_structure_full (ret, s1,
          gst_caps_features_copy (gst_caps_get_features (caps, i)));
    ret = gst_caps_merge_structure_full (ret, s2,
        gst_caps_features_copy (gst_caps_get_features (caps, i)));
    if (s3 != NULL)
      ret = gst_caps_merge_structure_full (ret, s3,
          gst_caps_features_copy (gst_caps_get_features (caps, i)));
  }

  if (filter) {
    GstCaps *intersection;

    /* FIRST mode keeps the filter's preference order in the result */
    intersection =
        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = intersection;
  }
  return ret;
}
/* only negotiate/query video overlay composition support for now */
/* Negotiate src caps for the DVB subtitle overlay.
 * @caps: (transfer none) (nullable): proposed caps; when NULL the srcpad's
 * current caps are used instead.  If the caps don't already carry the
 * overlay-composition meta feature, downstream is probed to see whether it
 * accepts the meta; overlay->attach_compo_to_buffer records the outcome so
 * the render path knows whether to attach compositions or blend them itself.
 * Returns: TRUE if caps could be set on the srcpad. */
static gboolean
gst_dvbsub_overlay_negotiate (GstDVBSubOverlay * overlay, GstCaps * caps)
{
  gboolean ret;
  gboolean attach = FALSE;
  gboolean caps_has_meta = TRUE;
  GstCapsFeatures *f;

  GST_DEBUG_OBJECT (overlay, "performing negotiation");

  if (!caps) {
    /* gst_pad_get_current_caps returns a new ref (or NULL), so from here on
     * we always own a ref on @caps and unref it on every exit path */
    caps = gst_pad_get_current_caps (overlay->srcpad);
  } else {
    gst_caps_ref (caps);
  }

  if (!caps || gst_caps_is_empty (caps))
    goto no_format;

  /* Try to use the overlay meta if possible */
  f = gst_caps_get_features (caps, 0);

  /* if the caps doesn't have the overlay meta, we query if downstream
   * accepts it before trying the version without the meta
   * If upstream already is using the meta then we can only use it */
  if (!f
      || !gst_caps_features_contains (f,
          GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION)) {
    GstCaps *overlay_caps;
    GstCaps *peercaps;

    /* In this case we added the meta, but we can work without it
     * so preserve the original caps so we can use it as a fallback */
    overlay_caps = gst_caps_copy (caps);

    f = gst_caps_get_features (overlay_caps, 0);
    gst_caps_features_add (f,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);

    /* FIXME: We should probably check if downstream *prefers* the
     * overlay meta, and only enforce usage of it if we can't handle
     * the format ourselves and thus would have to drop the overlays.
     * Otherwise we should prefer what downstream wants here.
     */
    peercaps = gst_pad_peer_query_caps (overlay->srcpad, NULL);
    caps_has_meta = gst_caps_can_intersect (peercaps, overlay_caps);
    gst_caps_unref (peercaps);

    GST_DEBUG_OBJECT (overlay, "Downstream accepts the overlay meta: %d",
        caps_has_meta);
    if (caps_has_meta) {
      /* switch to the meta-augmented caps, dropping our ref on the original */
      gst_caps_unref (caps);
      caps = overlay_caps;
    } else {
      /* fallback to the original */
      gst_caps_unref (overlay_caps);
      caps_has_meta = FALSE;
    }
  }
  GST_DEBUG_OBJECT (overlay, "Using caps %" GST_PTR_FORMAT, caps);
  ret = gst_pad_set_caps (overlay->srcpad, caps);

  if (ret) {
    GstQuery *query;

    /* find supported meta */
    query = gst_query_new_allocation (caps, FALSE);

    if (!gst_pad_peer_query (overlay->srcpad, query)) {
      /* no problem, we use the query defaults */
      GST_DEBUG_OBJECT (overlay, "ALLOCATION query failed");
    }

    /* only attach compositions if both the caps carry the meta feature and
     * the allocation query advertises the composition meta API */
    if (caps_has_meta && gst_query_find_allocation_meta (query,
            GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, NULL))
      attach = TRUE;

    overlay->attach_compo_to_buffer = attach;
    gst_query_unref (query);
  }

  gst_caps_unref (caps);

  return ret;

no_format:
  {
    if (caps)
      gst_caps_unref (caps);
    return FALSE;
  }
}
/* Determine the most capable caps feature the peer of @pad supports, in
 * preference order: VA-API surfaces > GL texture upload meta > system memory.
 * @format: the current video format; GST_VIDEO_FORMAT_ENCODED is mapped to
 * I420 for the template-caps probes.
 * @out_format_ptr: (out) (optional): receives the output format; for the
 * GL-texture-upload case it is re-derived from the intersected caps.
 * Returns GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED if the peer returned no
 * caps at all. */
GstVaapiCapsFeature
gst_vaapi_find_preferred_caps_feature (GstPad * pad, GstVideoFormat format,
    GstVideoFormat * out_format_ptr)
{
  GstVaapiCapsFeature feature = GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY;
  guint i, num_structures;
  GstCaps *caps = NULL;
  GstCaps *gl_texture_upload_caps = NULL;
  GstCaps *sysmem_caps = NULL;
  GstCaps *vaapi_caps = NULL;
  GstCaps *out_caps, *templ;
  GstVideoFormat out_format;

  templ = gst_pad_get_pad_template_caps (pad);
  out_caps = gst_pad_peer_query_caps (pad, templ);
  gst_caps_unref (templ);
  if (!out_caps) {
    feature = GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED;
    goto cleanup;
  }

  /* ENCODED has no renderable layout; use I420 as a representative raw
   * format for building the probe caps below */
  out_format = format == GST_VIDEO_FORMAT_ENCODED ?
      GST_VIDEO_FORMAT_I420 : format;

  /* Build one probe caps per candidate feature; any failure bails out with
   * the default (system memory) feature */
  gl_texture_upload_caps = new_gl_texture_upload_meta_caps ();
  if (!gl_texture_upload_caps)
    goto cleanup;

  vaapi_caps =
      gst_vaapi_video_format_new_template_caps_with_features (out_format,
      GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE);
  if (!vaapi_caps)
    goto cleanup;

  sysmem_caps =
      gst_vaapi_video_format_new_template_caps_with_features (out_format,
      GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
  if (!sysmem_caps)
    goto cleanup;

  /* Walk the peer's caps in order, upgrading @feature whenever a structure
   * intersects a higher-preference probe caps */
  num_structures = gst_caps_get_size (out_caps);
  for (i = 0; i < num_structures; i++) {
    GstCapsFeatures *const features = gst_caps_get_features (out_caps, i);
    GstStructure *const structure = gst_caps_get_structure (out_caps, i);

#if GST_CHECK_VERSION(1,3,0)
    /* Skip ANY features, we need an exact match for correct evaluation */
    if (gst_caps_features_is_any (features))
      continue;
#endif

    /* wrap this single structure (+ its features) as standalone caps so it
     * can be intersected against the probes in isolation */
    caps = gst_caps_new_full (gst_structure_copy (structure), NULL);
    if (!caps)
      continue;
    gst_caps_set_features (caps, 0, gst_caps_features_copy (features));
    if (gst_caps_can_intersect (caps, vaapi_caps) &&
        feature < GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
      feature = GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE;
    else if (gst_caps_can_intersect (caps, gl_texture_upload_caps) &&
        feature < GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
      feature = GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META;
    else if (gst_caps_can_intersect (caps, sysmem_caps) &&
        feature < GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY)
      feature = GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY;
    gst_caps_replace (&caps, NULL);

#if GST_CHECK_VERSION(1,3,0)
    /* Stop at the first match, the caps should already be sorted out
       by preference order from downstream elements */
    if (feature != GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY)
      break;
#endif
  }

  if (out_format_ptr) {
    if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META) {
      GstStructure *structure;
      gchar *format_str;
      out_format = GST_VIDEO_FORMAT_UNKNOWN;
      /* do/while(0) used as a breakable block; any failure leaves
       * out_format at UNKNOWN (== 0), caught below */
      do {
        caps = gst_caps_intersect_full (out_caps, gl_texture_upload_caps,
            GST_CAPS_INTERSECT_FIRST);
        if (!caps)
          break;
        structure = gst_caps_get_structure (caps, 0);
        if (!structure)
          break;
        if (!gst_structure_get (structure, "format", G_TYPE_STRING,
                &format_str, NULL))
          break;
        out_format = gst_video_format_from_string (format_str);
        g_free (format_str);
      } while (0);
      if (!out_format)
        goto cleanup;
    }
    *out_format_ptr = out_format;
  }

cleanup:
  /* gst_caps_replace (ptr, NULL) unrefs and NULLs; safe on NULL members */
  gst_caps_replace (&gl_texture_upload_caps, NULL);
  gst_caps_replace (&sysmem_caps, NULL);
  gst_caps_replace (&vaapi_caps, NULL);
  gst_caps_replace (&caps, NULL);
  gst_caps_replace (&out_caps, NULL);
  return feature;
}
/* CAPS query handler for the convert bin's ghost pads.  Forwards the query
 * to the peer of the opposite pad, widening @filter with the converter's own
 * caps so downstream doesn't return EMPTY for formats we could convert to,
 * and augments raw peer caps with the converter's formats (minus ANY
 * features).  Returns (transfer full) the resulting caps. */
static GstCaps *
gst_play_sink_convert_bin_getcaps (GstPad * pad, GstCaps * filter)
{
  GstPlaySinkConvertBin *self =
      GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer;

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  /* a query on one ghost pad is answered by the element behind the other */
  if (pad == self->srcpad) {
    otherpad = self->sinkpad;
  } else if (pad == self->sinkpad) {
    otherpad = self->srcpad;
  } else {
    GST_ERROR_OBJECT (pad, "Not one of our pads");
    otherpad = NULL;
  }

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    if (peer) {
      GstCaps *peer_caps;
      GstCaps *downstream_filter = NULL;

      /* Add all the caps that we can convert to to the filter caps,
       * otherwise downstream might just return EMPTY caps because
       * it doesn't handle the filter caps but we could still convert
       * to these caps */
      if (filter) {
        guint i, n;

        downstream_filter = gst_caps_new_empty ();

        /* Intersect raw video caps in the filter caps with the converter
         * caps. This makes sure that we don't accept raw video that we
         * can't handle, e.g. because of caps features */
        n = gst_caps_get_size (filter);
        for (i = 0; i < n; i++) {
          GstStructure *s;
          GstCaps *tmp, *tmp2;

          s = gst_structure_copy (gst_caps_get_structure (filter, i));
          if (gst_structure_has_name (s,
                  self->audio ? "audio/x-raw" : "video/x-raw")) {
            tmp = gst_caps_new_full (s, NULL);
            tmp2 = gst_caps_intersect (tmp, self->converter_caps);
            gst_caps_append (downstream_filter, tmp2);
            gst_caps_unref (tmp);
          } else {
            /* non-raw structures pass through unfiltered; @s ownership is
             * transferred to downstream_filter here */
            gst_caps_append_structure (downstream_filter, s);
          }
        }
        downstream_filter =
            gst_caps_merge (downstream_filter,
            gst_caps_ref (self->converter_caps));
      }

      peer_caps = gst_pad_query_caps (peer, downstream_filter);
      if (downstream_filter)
        gst_caps_unref (downstream_filter);
      gst_object_unref (peer);
      if (self->converter_caps && is_raw_caps (peer_caps, self->audio)) {
        GstCaps *converter_caps = gst_caps_ref (self->converter_caps);
        GstCapsFeatures *cf;
        GstStructure *s;
        guint i, n;

        ret = gst_caps_make_writable (peer_caps);

        /* Filter out ANY capsfeatures from the converter caps. We can't
         * convert to ANY capsfeatures, they are only there so that we
         * can passthrough whatever downstream can support... but we
         * definitely don't want to return them here */
        n = gst_caps_get_size (converter_caps);
        for (i = 0; i < n; i++) {
          s = gst_caps_get_structure (converter_caps, i);
          cf = gst_caps_get_features (converter_caps, i);

          if (cf && gst_caps_features_is_any (cf))
            continue;
          ret =
              gst_caps_merge_structure_full (ret, gst_structure_copy (s),
              (cf ? gst_caps_features_copy (cf) : NULL));
        }
        gst_caps_unref (converter_caps);
      } else {
        /* peer caps aren't raw (or we have no converter): return them as-is,
         * taking over the ref from gst_pad_query_caps */
        ret = peer_caps;
      }
    } else {
      /* unlinked: the best we can offer is what the converter supports */
      ret = gst_caps_ref (self->converter_caps);
    }
    GST_PLAY_SINK_CONVERT_BIN_FILTER_CAPS (filter, ret);
  } else {
    ret = filter ? gst_caps_ref (filter) : gst_caps_new_any ();
  }
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Composite all sink pads' GL textures into @outbuf.  If the negotiated
 * output caps use plain system memory (or @outbuf doesn't hold GL memory),
 * the result is rendered to an internal texture and downloaded into the
 * mapped frame instead of being written as GL memory directly.
 * Returns FALSE if the output frame can't be mapped, the GL FBO resources
 * never became ready, or the download step fails. */
gboolean
gst_gl_mixer_process_textures (GstGLMixer * mix, GstBuffer * outbuf)
{
  guint i;
  GList *walk;
  guint out_tex, out_tex_target;
  gboolean res = TRUE;
  guint array_index = 0;
  GstVideoFrame out_frame;
  GstElement *element = GST_ELEMENT (mix);
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mix);
  GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
  GstGLMixerPrivate *priv = mix->priv;
  /* download needed when the negotiated output is system memory... */
  gboolean to_download =
      gst_caps_features_is_equal (GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY,
      gst_caps_get_features (mix->out_caps, 0));
  GstMapFlags out_map_flags = GST_MAP_WRITE;

  GST_TRACE ("Processing buffers");

  /* ...or when the buffer we got isn't actually backed by GL memory */
  to_download |= !gst_is_gl_memory (gst_buffer_peek_memory (outbuf, 0));

  if (!to_download)
    out_map_flags |= GST_MAP_GL;

  if (!gst_video_frame_map (&out_frame, &vagg->info, outbuf, out_map_flags)) {
    return FALSE;
  }

  if (!to_download) {
    /* GL-mapped frame: plane 0 data is the texture id */
    out_tex = *(guint *) out_frame.data[0];
    out_tex_target =
        ((GstGLMemory *) gst_buffer_peek_memory (outbuf, 0))->tex_target;
  } else {
    GST_INFO ("Output Buffer does not contain correct memory, "
        "attempting to wrap for download");

    /* lazily create the downloader, then render into our own texture */
    if (!mix->download)
      mix->download = gst_gl_download_new (mix->context);

    gst_gl_download_set_format (mix->download, &out_frame.info);
    out_tex = mix->out_tex_id;
    out_tex_target = GL_TEXTURE_2D;
  }

  /* object lock guards the sinkpads list and the frames array while we
   * collect per-pad input textures */
  GST_OBJECT_LOCK (mix);
  walk = element->sinkpads;

  /* grow the frames array to one GstGLMixerFrameData per sink pad,
   * zero-initializing only the newly added slots */
  i = mix->frames->len;
  g_ptr_array_set_size (mix->frames, element->numsinkpads);
  for (; i < element->numsinkpads; i++)
    mix->frames->pdata[i] = g_slice_new0 (GstGLMixerFrameData);
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);
    GstGLMixerPadClass *pad_class = GST_GL_MIXER_PAD_GET_CLASS (pad);
    GstVideoAggregatorPad *vaggpad = walk->data;
    GstGLMixerFrameData *frame;

    frame = g_ptr_array_index (mix->frames, array_index);
    frame->pad = pad;
    frame->texture = 0;

    walk = g_list_next (walk);

    if (vaggpad->buffer != NULL) {
      g_assert (pad_class->upload_buffer);

      /* replace any previously uploaded GL buffer for this pad */
      if (pad->gl_buffer)
        gst_buffer_unref (pad->gl_buffer);
      pad->gl_buffer = pad_class->upload_buffer (mix, frame, vaggpad->buffer);

      GST_DEBUG_OBJECT (pad,
          "uploaded buffer %" GST_PTR_FORMAT " from buffer %" GST_PTR_FORMAT,
          pad->gl_buffer, vaggpad->buffer);
    }

    ++array_index;
  }

  /* wait (once) for the GL FBO resources to be created on the GL thread;
   * NOTE(review): a single g_cond_wait rather than a while-loop — a spurious
   * wakeup before the resource is ready is treated as failure */
  g_mutex_lock (&priv->gl_resource_lock);
  if (!priv->gl_resource_ready)
    g_cond_wait (&priv->gl_resource_cond, &priv->gl_resource_lock);

  if (!priv->gl_resource_ready) {
    g_mutex_unlock (&priv->gl_resource_lock);
    GST_ERROR_OBJECT (mix,
        "fbo used to render can't be created, do not run process_textures");
    res = FALSE;
    goto out;
  }

  /* subclass renders all collected frames into out_tex */
  mix_class->process_textures (mix, mix->frames, out_tex);

  g_mutex_unlock (&priv->gl_resource_lock);

  if (to_download) {
    if (!gst_gl_download_perform_with_data (mix->download,
            out_tex, out_tex_target, out_frame.data)) {
      GST_ELEMENT_ERROR (mix, RESOURCE, NOT_FOUND, ("%s",
              "Failed to download video frame"), (NULL));
      res = FALSE;
      goto out;
    }
  }

out:
  /* release any per-pad upload buffers; object lock is still held here */
  i = 0;
  walk = GST_ELEMENT (mix)->sinkpads;
  while (walk) {
    GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data);

    if (pad->upload)
      gst_gl_upload_release_buffer (pad->upload);

    walk = g_list_next (walk);
    i++;
  }
  GST_OBJECT_UNLOCK (mix);

  gst_video_frame_unmap (&out_frame);

  return res;
}