/* Builds the full set of caps the GL mixer can handle for @caps: the same
 * caps tagged with each supported memory/meta caps feature, plus raw caps
 * for every format the GL color converter knows, plus a format-free variant.
 * Returns (transfer full): the merged caps. */
GstCaps *
gst_gl_mixer_update_caps (GstGLMixer * mix, GstCaps * caps)
{
  GstCaps *result = NULL;
  /* variant using GLMemory as the buffer storage */
  GstCaps *glcaps = gst_gl_mixer_set_caps_features (caps,
      GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
#if GST_GL_HAVE_PLATFORM_EGL
  /* variant using EGLImage, only when built with EGL support */
  GstCaps *eglcaps = gst_gl_mixer_set_caps_features (caps,
      GST_CAPS_FEATURE_MEMORY_EGL_IMAGE);
#endif
  GstCaps *uploadcaps = gst_gl_mixer_set_caps_features (caps,
      GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META);
  /* raw caps covering every format the GL color converter can handle */
  GstCaps *raw_caps =
      gst_caps_from_string (GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS));

  result = gst_caps_new_empty ();

  /* gst_caps_merge() takes ownership of both arguments, so each variant is
   * consumed here; merge order sets caps preference (GL memory first) */
  result = gst_caps_merge (result, glcaps);
#if GST_GL_HAVE_PLATFORM_EGL
  result = gst_caps_merge (result, eglcaps);
#endif
  result = gst_caps_merge (result, uploadcaps);
  result = gst_caps_merge (result, raw_caps);

  /* finally allow any format by stripping format info from @caps */
  result = gst_caps_merge (result, gst_gl_mixer_caps_remove_format_info (caps));

  GST_DEBUG_OBJECT (mix, "returning %" GST_PTR_FORMAT, result);

  return result;
}
/* get_caps vfunc for the Opus RTP payloader.
 *
 * Src pad queries are deferred to the base class.  For the sink pad the
 * downstream peer is queried; when its first structure carries a "stereo"
 * field ("1" or "0"), the preferred channel count is placed first in the
 * returned sink caps and the other count is kept as a fallback. */
static GstCaps *
gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *caps, *peercaps, *tcaps;
  GstStructure *s;
  const gchar *stereo;

  if (pad == GST_RTP_BASE_PAYLOAD_SRCPAD (payload))
    return
        GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_opus_pay_parent_class)->get_caps
        (payload, pad, filter);

  tcaps = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  peercaps = gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload),
      tcaps);
  gst_caps_unref (tcaps);

  /* no peer: fall back to the base class behaviour */
  if (!peercaps)
    return
        GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_opus_pay_parent_class)->get_caps
        (payload, pad, filter);

  if (gst_caps_is_empty (peercaps))
    return peercaps;            /* (transfer full) empty caps */

  caps = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));

  s = gst_caps_get_structure (peercaps, 0);
  stereo = gst_structure_get_string (s, "stereo");
  if (stereo != NULL) {
    caps = gst_caps_make_writable (caps);

    if (!strcmp (stereo, "1")) {
      /* stereo preferred: 2 channels first, mono as fallback */
      GstCaps *caps2 = gst_caps_copy (caps);

      gst_caps_set_simple (caps, "channels", G_TYPE_INT, 2, NULL);
      gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 1, NULL);
      caps = gst_caps_merge (caps, caps2);
    } else if (!strcmp (stereo, "0")) {
      /* mono preferred: 1 channel first, stereo as fallback */
      GstCaps *caps2 = gst_caps_copy (caps);

      gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL);
      gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 2, NULL);
      caps = gst_caps_merge (caps, caps2);
    }
  }
  gst_caps_unref (peercaps);

  if (filter) {
    GstCaps *tmp = gst_caps_intersect_full (caps, filter,
        GST_CAPS_INTERSECT_FIRST);

    gst_caps_unref (caps);
    caps = tmp;
  }

  GST_DEBUG_OBJECT (payload, "Returning caps: %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Unit test: merging two caps describing the same media must collapse to a
 * single structure, regardless of the field order in the caps string. */
void test_merge_same()
{
  GstCaps *c1, *c2;
  //xmlfile = "test_merge_same";
  std_log(LOG_FILENAME_LINE, "Test Started test_merge_same");
  /* this is the same */
  c1 = gst_caps_from_string ("audio/x-raw-int,rate=44100,channels=1");
  c2 = gst_caps_from_string ("audio/x-raw-int,rate=44100,channels=1");
  /* gst_caps_merge() consumes c1, so only c2 is unreffed afterwards */
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  gst_caps_unref (c2);
  /* and so is this */
  c1 = gst_caps_from_string ("audio/x-raw-int,rate=44100,channels=1");
  c2 = gst_caps_from_string ("audio/x-raw-int,channels=1,rate=44100");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  gst_caps_unref (c2);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
  /* disabled buffer-field merge cases kept for reference:
  c1 = gst_caps_from_string ("video/x-foo, data=(buffer)AA");
  c2 = gst_caps_from_string ("video/x-foo, data=(buffer)AABB");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 2, NULL);
  gst_caps_unref (c2);

  c1 = gst_caps_from_string ("video/x-foo, data=(buffer)AABB");
  c2 = gst_caps_from_string ("video/x-foo, data=(buffer)AA");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 2, NULL);
  gst_caps_unref (c2);

  c1 = gst_caps_from_string ("video/x-foo, data=(buffer)AA");
  c2 = gst_caps_from_string ("video/x-foo, data=(buffer)AA");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  gst_caps_unref (c2);

  c1 = gst_caps_from_string ("video/x-foo, data=(buffer)AA");
  c2 = gst_caps_from_string ("video/x-bar, data=(buffer)AA");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 2, NULL);
  gst_caps_unref (c2); --------commented---------------*/
}
/* getcaps for a GL mixer sink pad: combines the pad's current caps (expanded
 * with every supported GL caps variant) or, before negotiation, the template
 * caps with the optional @filter, clipped against the pad template.
 * Returns (transfer full): the possible caps for @pad. */
static GstCaps *
gst_gl_mixer_pad_sink_getcaps (GstPad * pad, GstGLMixer * mix, GstCaps * filter)
{
  GstCaps *srccaps;
  GstCaps *template_caps;
  GstCaps *filtered_caps;
  GstCaps *returned_caps;
  gboolean had_current_caps = TRUE;

  template_caps = gst_pad_get_pad_template_caps (pad);

  srccaps = gst_pad_get_current_caps (pad);
  if (srccaps == NULL) {
    had_current_caps = FALSE;
    /* not negotiated yet: fall back to the template caps (aliases the
     * template_caps reference, see the unref logic below) */
    srccaps = template_caps;
  } else {
    /* expand the negotiated caps with every supported GL variant;
     * gst_caps_merge() consumes both references */
    srccaps = gst_caps_merge (srccaps, gst_gl_mixer_update_caps (mix, srccaps));
  }

  filtered_caps = srccaps;
  if (filter)
    filtered_caps = gst_caps_intersect (srccaps, filter);
  returned_caps = gst_caps_intersect (filtered_caps, template_caps);

  if (filter)
    gst_caps_unref (filtered_caps);
  /* FIX: release our srccaps reference; the original code leaked it on every
   * call.  When no current caps existed, srccaps aliases template_caps and
   * had_current_caps is FALSE, so the single template reference is dropped
   * exactly once. */
  gst_caps_unref (srccaps);
  if (had_current_caps)
    gst_caps_unref (template_caps);

  GST_DEBUG_OBJECT (pad, "returning %" GST_PTR_FORMAT, returned_caps);

  return returned_caps;
}
/* Get the intersection of parser caps and available (sorted) decoders.
 * Returns (transfer full): @caps clipped to what at least one decoder
 * factory can sink. */
static GstCaps *
get_parser_caps_filter (GstDecodebin3 * dbin, GstCaps * caps)
{
  GList *tmp;
  GstCaps *filter_caps = gst_caps_new_empty ();

  /* factories_lock protects the cached decoder_factories list */
  g_mutex_lock (&dbin->factories_lock);
  gst_decode_bin_update_factories_list (dbin);
  for (tmp = dbin->decoder_factories; tmp; tmp = tmp->next) {
    GstElementFactory *factory = (GstElementFactory *) tmp->data;
    GstCaps *tcaps, *intersection;
    const GList *tmps;

    GST_LOG ("Trying factory %s",
        gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
    for (tmps = gst_element_factory_get_static_pad_templates (factory);
        tmps; tmps = tmps->next) {
      GstStaticPadTemplate *st = (GstStaticPadTemplate *) tmps->data;

      /* only always-present sink templates can receive the parsed stream */
      if (st->direction != GST_PAD_SINK || st->presence != GST_PAD_ALWAYS)
        continue;
      tcaps = gst_static_pad_template_get_caps (st);
      intersection =
          gst_caps_intersect_full (tcaps, caps, GST_CAPS_INTERSECT_FIRST);
      /* gst_caps_merge() consumes both arguments */
      filter_caps = gst_caps_merge (filter_caps, intersection);
      gst_caps_unref (tcaps);
    }
  }
  g_mutex_unlock (&dbin->factories_lock);

  GST_DEBUG_OBJECT (dbin, "Got filter caps %" GST_PTR_FORMAT, filter_caps);
  return filter_caps;
}
/**
 * gst_vaapi_profile_get_caps:
 * @profile: a #GstVaapiProfile
 *
 * Converts a #GstVaapiProfile into the corresponding #GstCaps. If no
 * matching caps were found, %NULL is returned.
 *
 * Return value: the newly allocated #GstCaps, or %NULL if none was found
 */
GstCaps *
gst_vaapi_profile_get_caps(GstVaapiProfile profile)
{
    const GstVaapiProfileMap *map_entry;
    GstCaps *result, *entry_caps;

    result = gst_caps_new_empty();
    if (!result)
        return NULL;

    /* collect caps from every profile-map entry matching @profile */
    for (map_entry = gst_vaapi_profiles; map_entry->profile; map_entry++) {
        if (map_entry->profile != profile)
            continue;

        entry_caps = gst_caps_from_string(map_entry->media_str);
        if (!entry_caps)
            continue;

        gst_caps_set_simple(entry_caps,
            "profile", G_TYPE_STRING, map_entry->profile_str, NULL);
        /* gst_caps_merge() consumes both caps and returns the union */
        result = gst_caps_merge(result, entry_caps);
    }
    return result;
}
/* Appends a video/x-h264 structure carrying @profile_name to @caps.
 * Takes ownership of @caps; returns (transfer full) the merged caps. */
static GstCaps *
add_h264_profile_in_caps (GstCaps * caps, const gchar * profile_name)
{
  GstCaps *profile_caps;

  profile_caps = gst_caps_new_simple ("video/x-h264",
      "profile", G_TYPE_STRING, profile_name, NULL);

  return gst_caps_merge (profile_caps, caps);
}
/* Builds decode->allowed_sinkpad_caps from the VA display's supported decode
 * profiles: one structure per profile (media type plus "profile" field).
 * Returns FALSE when the profile list or the caps cannot be obtained. */
static gboolean
gst_vaapidecode_ensure_allowed_sinkpad_caps (GstVaapiDecode * decode)
{
  GstCaps *caps, *allowed_sinkpad_caps;
  GArray *profiles;
  guint i;

  profiles =
      gst_vaapi_display_get_decode_profiles (GST_VAAPI_PLUGIN_BASE_DISPLAY
      (decode));
  if (!profiles)
    goto error_no_profiles;

  allowed_sinkpad_caps = gst_caps_new_empty ();
  if (!allowed_sinkpad_caps)
    goto error_no_memory;

  for (i = 0; i < profiles->len; i++) {
    const GstVaapiProfile profile =
        g_array_index (profiles, GstVaapiProfile, i);
    const gchar *media_type_name;
    const gchar *profile_name;
    GstStructure *structure;

    media_type_name = gst_vaapi_profile_get_media_type_name (profile);
    if (!media_type_name)
      continue;                 /* profile with no caps mapping: skip */

    caps = gst_caps_from_string (media_type_name);
    if (!caps)
      continue;
    structure = gst_caps_get_structure (caps, 0);

    profile_name = gst_vaapi_profile_get_name (profile);
    if (profile_name)
      gst_structure_set (structure, "profile", G_TYPE_STRING, profile_name,
          NULL);

    /* gst_caps_merge() takes ownership of both caps */
    allowed_sinkpad_caps = gst_caps_merge (allowed_sinkpad_caps, caps);
  }
  /* fold duplicate media types together */
  decode->allowed_sinkpad_caps = gst_caps_simplify (allowed_sinkpad_caps);

  g_array_unref (profiles);
  return TRUE;

  /* ERRORS */
error_no_profiles:
  {
    GST_ERROR ("failed to retrieve VA decode profiles");
    return FALSE;
  }
error_no_memory:
  {
    GST_ERROR ("failed to allocate allowed-caps set");
    g_array_unref (profiles);
    return FALSE;
  }
}
/* Unit test: merging with the fundamental ANY and EMPTY caps.
 * ANY absorbs everything (size 0, is_any); EMPTY is the neutral element. */
void test_merge_fundamental()
{
  GstCaps *c1, *c2;
  //xmlfile = "test_merge_fundamental";
  std_log(LOG_FILENAME_LINE, "Test Started test_merge_fundamental");
  /* ANY + specific = ANY */
  c1 = gst_caps_from_string ("audio/x-raw-int,rate=44100");
  c2 = gst_caps_new_any ();
  /* gst_caps_merge() consumes c1; c2 holds the merged result */
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 0, NULL);
  fail_unless (gst_caps_is_any (c2), NULL);
  gst_caps_unref (c2);
  /* specific + ANY = ANY */
  c2 = gst_caps_from_string ("audio/x-raw-int,rate=44100");
  c1 = gst_caps_new_any ();
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 0, NULL);
  fail_unless (gst_caps_is_any (c2), NULL);
  gst_caps_unref (c2);
  /* EMPTY + specific = specific */
  c1 = gst_caps_from_string ("audio/x-raw-int,rate=44100");
  c2 = gst_caps_new_empty ();
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  fail_if (gst_caps_is_empty (c2), NULL);
  gst_caps_unref (c2);
  /* specific + EMPTY = specific */
  c2 = gst_caps_from_string ("audio/x-raw-int,rate=44100");
  c1 = gst_caps_new_empty ();
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  fail_if (gst_caps_is_empty (c2), NULL);
  gst_caps_unref (c2);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
/* getcaps for the stream-splitter sink pad: returns the merged peer caps of
 * all src pads (the union of what any downstream branch accepts).
 * The STREAMS lock cannot be held across the peer query, so the pad-list
 * "cookie" detects concurrent changes and restarts the walk. */
static GstCaps *
gst_stream_splitter_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstStreamSplitter *stream_splitter =
      (GstStreamSplitter *) GST_PAD_PARENT (pad);
  guint32 cookie;
  GList *tmp;
  GstCaps *res = NULL;

  /* Return the combination of all downstream caps */

  STREAMS_LOCK (stream_splitter);

resync:
  if (G_UNLIKELY (stream_splitter->srcpads == NULL)) {
    /* no src pads yet: anything (restricted by @filter) is acceptable */
    res = (filter ? gst_caps_ref (filter) : gst_caps_new_any ());
    goto beach;
  }

  res = NULL;
  cookie = stream_splitter->cookie;
  tmp = stream_splitter->srcpads;

  while (tmp) {
    GstPad *srcpad = (GstPad *) tmp->data;

    /* Ensure srcpad doesn't get destroyed while we query peer */
    gst_object_ref (srcpad);
    STREAMS_UNLOCK (stream_splitter);
    if (res) {
      GstCaps *peercaps = gst_pad_peer_query_caps (srcpad, filter);
      if (peercaps)
        res = gst_caps_merge (res, peercaps);
    } else {
      res = gst_pad_peer_query_caps (srcpad, filter);
    }
    STREAMS_LOCK (stream_splitter);
    gst_object_unref (srcpad);

    /* the pad list changed while we were unlocked: start over */
    if (G_UNLIKELY (cookie != stream_splitter->cookie)) {
      if (res)
        gst_caps_unref (res);
      goto resync;
    }
    tmp = tmp->next;
  }

beach:
  STREAMS_UNLOCK (stream_splitter);
  return res;
}
/* Unit test: merging a caps already covered by the existing caps is dropped;
 * a caps that is NOT a subset is appended. */
void test_merge_subset()
{
  GstCaps *c1, *c2;
  //xmlfile = "test_merge_subset";
  std_log(LOG_FILENAME_LINE, "Test Started test_merge_subset");
  /* the 2nd is already covered */
  c2 = gst_caps_from_string ("audio/x-raw-int,channels=[1,2]");
  c1 = gst_caps_from_string ("audio/x-raw-int,channels=1");
  /* gst_caps_merge() consumes c1 */
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 1, NULL);
  gst_caps_unref (c2);
  /* here it is not */
  c2 = gst_caps_from_string ("audio/x-raw-int,channels=1,rate=44100");
  c1 = gst_caps_from_string ("audio/x-raw-int,channels=[1,2],rate=44100");
  gst_caps_merge (c2, c1);
  GST_DEBUG ("merged: (%d) %" GST_PTR_FORMAT, gst_caps_get_size (c2), c2);
  fail_unless (gst_caps_get_size (c2) == 2, NULL);
  gst_caps_unref (c2);
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
/**
 * gst_encoding_profile_get_input_caps:
 * @profile: a #GstEncodingProfile
 *
 * Computes the full input caps that this @profile will be able to consume.
 *
 * Returns: (transfer full): The full caps the given @profile can consume. Call
 * gst_caps_unref() when you are done with the caps.
 */
GstCaps *
gst_encoding_profile_get_input_caps (GstEncodingProfile * profile)
{
  GstCaps *out, *tmp;
  GList *ltmp;
  GstStructure *st, *outst;
  GQuark out_name;
  guint i, len;
  GstCaps *fcaps;

  g_return_val_if_fail (GST_IS_ENCODING_PROFILE (profile), NULL);

  if (GST_IS_ENCODING_CONTAINER_PROFILE (profile)) {
    /* container: union of the input caps of every contained stream profile */
    GstCaps *res = gst_caps_new_empty ();

    for (ltmp = GST_ENCODING_CONTAINER_PROFILE (profile)->encodingprofiles;
        ltmp; ltmp = ltmp->next) {
      GstEncodingProfile *sprof = (GstEncodingProfile *) ltmp->data;
      res = gst_caps_merge (res, gst_encoding_profile_get_input_caps (sprof));
    }
    return res;
  }

  fcaps = profile->format;

  /* fast-path */
  if ((profile->restriction == NULL) || gst_caps_is_any (profile->restriction))
    return gst_caps_ref (fcaps);

  /* Combine the format with the restriction caps */
  outst = gst_caps_get_structure (fcaps, 0);
  out_name = gst_structure_get_name_id (outst);
  tmp = gst_caps_new_empty ();
  len = gst_caps_get_size (profile->restriction);
  for (i = 0; i < len; i++) {
    st = gst_structure_copy (gst_caps_get_structure (profile->restriction, i));
    /* rename each restriction structure to the format's media type so the
     * intersection below is not trivially empty */
    st->name = out_name;
    gst_caps_append_structure (tmp, st);
  }

  out = gst_caps_intersect (tmp, fcaps);
  gst_caps_unref (tmp);

  return out;
}
/* Returns @caps extended with a variant of every structure that additionally
 * carries the overlay-composition meta caps feature.  Takes ownership of
 * @caps; returns (transfer full) the merged caps, composition-capable
 * entries first. */
GstCaps *
gst_gl_overlay_compositor_add_caps (GstCaps * caps)
{
  GstCaps *with_composition;
  guint idx, n;

  with_composition = gst_caps_copy (caps);
  n = gst_caps_get_size (with_composition);

  /* tag every copied structure with the overlay-composition feature */
  for (idx = 0; idx < n; idx++) {
    GstCapsFeatures *features = gst_caps_get_features (with_composition, idx);

    gst_caps_features_add (features,
        GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION);
  }

  /* gst_caps_merge() consumes both arguments */
  return gst_caps_merge (with_composition, caps);
}
/* getcaps for the playsink convert bin (simple variant): the result is what
 * the opposite pad's peer accepts, extended with the internal converter caps
 * when the peer caps are raw. */
static GstCaps *
gst_play_sink_convert_bin_getcaps (GstPad * pad, GstCaps * filter)
{
  GstPlaySinkConvertBin *self =
      GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer;

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  if (pad == self->srcpad) {
    otherpad = self->sinkpad;
  } else if (pad == self->sinkpad) {
    otherpad = self->srcpad;
  } else {
    GST_ERROR_OBJECT (pad, "Not one of our pads");
    otherpad = NULL;
  }

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    if (peer) {
      GstCaps *peer_caps = gst_pad_query_caps (peer, filter);
      gst_object_unref (peer);
      if (self->converter_caps && is_raw_caps (peer_caps, self->audio)) {
        /* raw caps: we can additionally convert to converter_caps;
         * gst_caps_merge() consumes both references */
        ret = gst_caps_merge (peer_caps, gst_caps_ref (self->converter_caps));
      } else {
        ret = peer_caps;
      }
    } else {
      /* NOTE(review): @filter is not applied on this no-peer path — confirm
       * callers cope with an unfiltered result */
      ret = gst_caps_ref (self->converter_caps);
    }
  } else {
    ret = gst_caps_new_any ();
  }
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;
}
/*
 * Takes caps and copies its video fields to tmpl_caps.
 * Returns (transfer full): the cross product of every template structure
 * with the size/rate/colorimetry fields of every input structure.
 */
static GstCaps *
__gst_video_element_proxy_caps (GstElement * element, GstCaps * templ_caps,
    GstCaps * caps)
{
  GstCaps *result = gst_caps_new_empty ();
  gint i, j;
  gint templ_caps_size = gst_caps_get_size (templ_caps);
  gint caps_size = gst_caps_get_size (caps);

  for (i = 0; i < templ_caps_size; i++) {
    GQuark q_name =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));
    GstCapsFeatures *features = gst_caps_get_features (templ_caps, i);

    for (j = 0; j < caps_size; j++) {
      const GstStructure *caps_s = gst_caps_get_structure (caps, j);
      const GValue *val;
      GstStructure *s;
      GstCaps *tmp = gst_caps_new_empty ();

      s = gst_structure_new_id_empty (q_name);

      /* only proxy the video geometry / colorimetry fields */
      if ((val = gst_structure_get_value (caps_s, "width")))
        gst_structure_set_value (s, "width", val);
      if ((val = gst_structure_get_value (caps_s, "height")))
        gst_structure_set_value (s, "height", val);
      if ((val = gst_structure_get_value (caps_s, "framerate")))
        gst_structure_set_value (s, "framerate", val);
      if ((val = gst_structure_get_value (caps_s, "pixel-aspect-ratio")))
        gst_structure_set_value (s, "pixel-aspect-ratio", val);
      if ((val = gst_structure_get_value (caps_s, "colorimetry")))
        gst_structure_set_value (s, "colorimetry", val);
      if ((val = gst_structure_get_value (caps_s, "chroma-site")))
        gst_structure_set_value (s, "chroma-site", val);

      gst_caps_append_structure_full (tmp, s,
          gst_caps_features_copy (features));
      /* merge drops duplicates/subsets; consumes both caps */
      result = gst_caps_merge (result, tmp);
    }
  }

  return result;
}
/* Lazily builds self->allowed_caps: one video/x-raw structure per DRM
 * format of @plane, using the device's size limits from @res.
 * Returns FALSE only when the initial caps allocation fails. */
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModePlane * plane, drmModeRes * res)
{
  GstCaps *accumulated, *format_caps;
  GstVideoFormat video_fmt;
  const gchar *fmt_name;
  int idx;

  /* already computed on a previous call */
  if (self->allowed_caps)
    return TRUE;

  accumulated = gst_caps_new_empty ();
  if (!accumulated)
    return FALSE;

  for (idx = 0; idx < plane->count_formats; idx++) {
    video_fmt = gst_video_format_from_drm (plane->formats[idx]);
    if (video_fmt == GST_VIDEO_FORMAT_UNKNOWN) {
      GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (plane->formats[idx]));
      continue;
    }

    fmt_name = gst_video_format_to_string (video_fmt);

    format_caps = gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, fmt_name,
        "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
        "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!format_caps)
      continue;

    /* gst_caps_merge() consumes both arguments */
    accumulated = gst_caps_merge (accumulated, format_caps);
  }

  self->allowed_caps = gst_caps_simplify (accumulated);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return TRUE;
}
/*
 * Takes caps and copies its audio fields to tmpl_caps.
 * Returns (transfer full): the cross product of every template structure
 * with the rate/channels/channels-mask fields of every input structure.
 */
static GstCaps *
__gst_audio_element_proxy_caps (GstElement * element, GstCaps * templ_caps,
    GstCaps * caps)
{
  /* the audio fields being proxied, in the order they are copied */
  static const gchar *audio_fields[] = { "rate", "channels", "channels-mask" };
  GstCaps *out = gst_caps_new_empty ();
  gint n_templ = gst_caps_get_size (templ_caps);
  gint n_caps = gst_caps_get_size (caps);
  gint t, c;
  guint f;

  for (t = 0; t < n_templ; t++) {
    GQuark name_id =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, t));
    GstCapsFeatures *feats = gst_caps_get_features (templ_caps, t);

    for (c = 0; c < n_caps; c++) {
      const GstStructure *src_s = gst_caps_get_structure (caps, c);
      GstStructure *proxy = gst_structure_new_id_empty (name_id);
      GstCaps *one = gst_caps_new_empty ();
      const GValue *v;

      for (f = 0; f < G_N_ELEMENTS (audio_fields); f++) {
        if ((v = gst_structure_get_value (src_s, audio_fields[f])))
          gst_structure_set_value (proxy, audio_fields[f], v);
      }

      gst_caps_append_structure_full (one, proxy,
          gst_caps_features_copy (feats));
      /* merge drops duplicates/subsets; consumes both caps */
      out = gst_caps_merge (out, one);
    }
  }

  return out;
}
/* if element caps already in list, will make sure Transform elements have
 * priority and replace old ones */
static GList *
create_codec_cap_list (GstElementFactory *factory,
    GstPadDirection direction, GList *list, GstCaps *rtp_caps)
{
  const GList *pads = gst_element_factory_get_static_pad_templates (factory);
  gint i;

  /* Let us look at each pad for stuff to add */
  while (pads) {
    GstCaps *caps = NULL;
    GstStaticPadTemplate *padtemplate = NULL;

    padtemplate = (GstStaticPadTemplate *) (pads->data);
    pads = g_list_next (pads);

    /* only always-present templates of the requested direction qualify */
    if (padtemplate->direction != direction)
      continue;

    if (padtemplate->presence != GST_PAD_ALWAYS) {
      continue;
    }

    caps = gst_static_pad_template_get_caps (padtemplate);
    /*
      DEBUG ("%s caps are %s", gst_plugin_feature_get_name (GST_PLUGIN_FEATURE
      (factory)), gst_caps_to_string (caps));
    */

    /* skips caps ANY */
    if (!caps || gst_caps_is_any (caps)) {
      goto done;
    }

    /* let us add one entry to the list per media type */
    for (i = 0; i < gst_caps_get_size (caps); i++) {
      CodecCap *entry = NULL;
      GList *found_item = NULL;
      GstStructure *structure = gst_caps_get_structure (caps, i);
      GstCaps *cur_caps = NULL;

      /* FIXME fix this in gstreamer!
       * The rtpdepay element is bogus, it claims to be a depayloader yet has
       * application/x-rtp on both sides and does absolutely nothing */
      /* Let's check if media caps are really media caps, this is to deal with
       * wierd elements such as rtpdepay that says it's a depayloader but has
       * application/x-rtp on src and sink pads */
      const gchar *name = gst_structure_get_name (structure);
      if (g_ascii_strcasecmp (name, "application/x-rtp") == 0) {
        GST_DEBUG ("skipping %s : %s",
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)), name);
        continue;
      }

      cur_caps = gst_caps_new_full (gst_structure_copy (structure), NULL);

      /* let's check if this caps is already in the list, if so let's replace
       * that CodecCap list instead of creating a new one */
      /* we need to compare both media caps and rtp caps */
      found_item = g_list_find_custom (list, cur_caps,
          (GCompareFunc)compare_media_caps);
      if (found_item) {
        entry = (CodecCap *)found_item->data;
        /* if RTP caps exist and don't match nullify entry */
        if (rtp_caps && compare_rtp_caps (found_item->data, rtp_caps)) {
          entry = NULL;
        }
      }

      if (!entry) {
        /* new media type: create a fresh CodecCap entry owning cur_caps */
        entry = g_slice_new0 (CodecCap);

        entry->caps = cur_caps;
        if (rtp_caps) {
          entry->rtp_caps = rtp_caps;
          gst_caps_ref (rtp_caps);
        }
        list = g_list_append (list, entry);
        entry->element_list1 = g_list_prepend (NULL,
            g_list_prepend (NULL, factory));
        gst_object_ref (factory);
      } else {
        /* existing entry: append this factory and tighten its RTP caps */
        entry->element_list1->data = g_list_append (entry->element_list1->data,
            factory);
        gst_object_ref (factory);

        if (rtp_caps) {
          if (entry->rtp_caps) {
            GstCaps *tmp = gst_caps_intersect (rtp_caps, entry->rtp_caps);
            gst_caps_unref (entry->rtp_caps);
            entry->rtp_caps = tmp;
          } else {
            entry->rtp_caps = gst_caps_ref (rtp_caps);
            /* This shouldn't happen, its we're looking at rtp elements
             * or we're not */
            g_assert_not_reached ();
          }
        }

        /* gst_caps_merge() consumes both cur_caps and the old entry caps */
        entry->caps = gst_caps_merge (cur_caps, entry->caps);
      }
    }
  done:
    if (caps != NULL) {
      gst_caps_unref (caps);
    }
  }

  return list;
}
/* GstPluginFeature filter callback: selects decoder element factories of at
 * least marginal rank that can sink ctx->desired_caps.  The sinkable caps of
 * every matching factory are accumulated into ctx->decoder_caps. */
static gboolean
rsndec_factory_filter (GstPluginFeature * feature, RsnDecFactoryFilterCtx * ctx)
{
  GstElementFactory *factory;
  guint rank;
  const gchar *klass;
  const GList *templates;
  GList *walk;
  gboolean can_sink = FALSE;

  /* we only care about element factories */
  if (!GST_IS_ELEMENT_FACTORY (feature))
    return FALSE;

  factory = GST_ELEMENT_FACTORY (feature);

  klass = gst_element_factory_get_klass (factory);
  /* only decoders can play */
  if (strstr (klass, "Decoder") == NULL)
    return FALSE;

  /* only select elements with autoplugging rank */
  rank = gst_plugin_feature_get_rank (feature);
  if (rank < GST_RANK_MARGINAL)
    return FALSE;

  /* See if the element has a sink pad that can possibly sink this caps */

  /* get the templates from the element factory */
  templates = gst_element_factory_get_static_pad_templates (factory);
  for (walk = (GList *) templates; walk && !can_sink;
      walk = g_list_next (walk)) {
    GstStaticPadTemplate *templ = walk->data;

    /* we only care about the sink templates */
    if (templ->direction == GST_PAD_SINK) {
      GstCaps *intersect;
      GstCaps *tmpl_caps;

      /* try to intersect the caps with the caps of the template */
      tmpl_caps = gst_static_caps_get (&templ->static_caps);

      intersect = gst_caps_intersect (ctx->desired_caps, tmpl_caps);
      gst_caps_unref (tmpl_caps);

      /* check if the intersection is empty */
      if (!gst_caps_is_empty (intersect)) {
        /* non empty intersection, we can use this element;
         * gst_caps_merge() consumes intersect */
        can_sink = TRUE;
        ctx->decoder_caps = gst_caps_merge (ctx->decoder_caps, intersect);
      } else
        gst_caps_unref (intersect);
    }
  }

  if (can_sink) {
    GST_DEBUG ("Found decoder element %s (%s)",
        gst_element_factory_get_longname (factory),
        gst_plugin_feature_get_name (feature));
  }

  return can_sink;
}
/* Lazily builds self->allowed_caps for the KMS sink.  When modesetting is
 * enabled, one caps set per connector mode (fixed width/height) is produced;
 * otherwise a single set spanning the device's full size range.  Each set
 * lists every DRM format supported by @plane. */
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModeConnector * conn,
    drmModePlane * plane, drmModeRes * res)
{
  GstCaps *out_caps, *tmp_caps, *caps;
  int i, j;
  GstVideoFormat fmt;
  const gchar *format;
  drmModeModeInfo *mode;
  gint count_modes;

  if (self->allowed_caps)
    return TRUE;

  out_caps = gst_caps_new_empty ();
  if (!out_caps)
    return FALSE;

  if (conn && self->modesetting_enabled)
    count_modes = conn->count_modes;
  else
    count_modes = 1;

  for (i = 0; i < count_modes; i++) {
    tmp_caps = gst_caps_new_empty ();
    if (!tmp_caps)
      /* NOTE(review): out_caps is leaked on this early return — confirm */
      return FALSE;

    mode = NULL;
    if (conn && self->modesetting_enabled)
      mode = &conn->modes[i];

    for (j = 0; j < plane->count_formats; j++) {
      fmt = gst_video_format_from_drm (plane->formats[j]);
      if (fmt == GST_VIDEO_FORMAT_UNKNOWN) {
        GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (plane->formats[j]));
        continue;
      }

      format = gst_video_format_to_string (fmt);

      if (mode) {
        /* fixed resolution taken from the connector mode */
        caps = gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, format,
            "width", G_TYPE_INT, mode->hdisplay,
            "height", G_TYPE_INT, mode->vdisplay,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      } else {
        /* full size range supported by the device */
        caps = gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, format,
            "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
            "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      }
      if (!caps)
        continue;

      /* gst_caps_merge() consumes both arguments */
      tmp_caps = gst_caps_merge (tmp_caps, caps);
    }

    out_caps = gst_caps_merge (out_caps, gst_caps_simplify (tmp_caps));
  }

  self->allowed_caps = gst_caps_simplify (out_caps);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return (self->allowed_caps && !gst_caps_is_empty (self->allowed_caps));
}
/* Builds decode->allowed_sinkpad_caps from the VA display's decode profiles.
 * On top of the per-profile caps, some H.264 profile aliases are added when
 * the high profile is supported, and MVC/SVC profiles are forced in when the
 * decoder runs in "base-only" mode without native MVC/SVC support. */
static gboolean
gst_vaapidecode_ensure_allowed_sinkpad_caps (GstVaapiDecode * decode)
{
  GstCaps *caps, *allowed_sinkpad_caps;
  GArray *profiles;
  guint i;
  gboolean base_only = FALSE;
  gboolean have_high = FALSE;
  gboolean have_mvc = FALSE;
  gboolean have_svc = FALSE;

  profiles =
      gst_vaapi_display_get_decode_profiles (GST_VAAPI_PLUGIN_BASE_DISPLAY
      (decode));
  if (!profiles)
    goto error_no_profiles;

  allowed_sinkpad_caps = gst_caps_new_empty ();
  if (!allowed_sinkpad_caps)
    goto error_no_memory;

  /* "base-only" is an optional property of some subclasses */
  if (g_object_class_find_property (G_OBJECT_GET_CLASS (decode), "base-only")) {
    g_object_get (decode, "base-only", &base_only, NULL);
  }

  for (i = 0; i < profiles->len; i++) {
    const GstVaapiProfile profile =
        g_array_index (profiles, GstVaapiProfile, i);
    const gchar *media_type_name;
    const gchar *profile_name;
    GstStructure *structure;

    media_type_name = gst_vaapi_profile_get_media_type_name (profile);
    if (!media_type_name)
      continue;                 /* profile with no caps mapping: skip */

    caps = gst_caps_from_string (media_type_name);
    if (!caps)
      continue;
    structure = gst_caps_get_structure (caps, 0);

    profile_name = gst_vaapi_profile_get_name (profile);
    if (profile_name)
      gst_structure_set (structure, "profile", G_TYPE_STRING, profile_name,
          NULL);

    /* gst_caps_merge() takes ownership of both caps */
    allowed_sinkpad_caps = gst_caps_merge (allowed_sinkpad_caps, caps);

    /* track which H.264 profile families were seen */
    have_mvc |= is_mvc_profile (profile);
    have_svc |= is_svc_profile (profile);
    have_high |= profile == GST_VAAPI_PROFILE_H264_HIGH;
  }

  if (have_high) {
    /* the high decoder also handles these alias profiles */
    allowed_sinkpad_caps =
        add_h264_profile_in_caps (allowed_sinkpad_caps, "progressive-high");
    allowed_sinkpad_caps =
        add_h264_profile_in_caps (allowed_sinkpad_caps, "constrained-high");
  }

  if (base_only && (!have_mvc || !have_svc) && have_high) {
    /* base-only decoding of the base view/layer is possible with the
     * high decoder even without native MVC/SVC support */
    if (!have_mvc) {
      GST_DEBUG ("base_only: force adding MVC profiles in caps");

      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "multiview-high");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "stereo-high");
    }

    if (!have_svc) {
      GST_DEBUG ("base_only: force adding SVC profiles in caps");

      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-constrained-baseline");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "scalable-baseline");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-high-intra");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-constrained-high");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "scalable-high");
    }
  }
  decode->allowed_sinkpad_caps = gst_caps_simplify (allowed_sinkpad_caps);

  g_array_unref (profiles);
  return TRUE;

  /* ERRORS */
error_no_profiles:
  {
    GST_ERROR ("failed to retrieve VA decode profiles");
    return FALSE;
  }
error_no_memory:
  {
    GST_ERROR ("failed to allocate allowed-caps set");
    g_array_unref (profiles);
    return FALSE;
  }
}
/* returns the intersection of two lists.
 * Each (caps, rtp_caps) pair from @list1 is matched against every pair in
 * @list2; matches are collected into a new list of CodecCap, with element
 * lists from both sides concatenated. */
static GList *
codec_cap_list_intersect (GList *list1, GList *list2)
{
  GList *walk1, *walk2;
  CodecCap *codec_cap1, *codec_cap2;
  GstCaps *caps1, *caps2;
  GstCaps *rtp_caps1, *rtp_caps2;
  GList *intersection_list = NULL;

  for (walk1 = g_list_first (list1); walk1; walk1 = g_list_next (walk1)) {
    CodecCap *item = NULL;
    codec_cap1 = (CodecCap *)(walk1->data);
    caps1 = codec_cap1->caps;
    rtp_caps1 = codec_cap1->rtp_caps;
    for (walk2 = list2; walk2; walk2 = g_list_next (walk2)) {
      GstCaps *intersection = NULL;
      GstCaps *rtp_intersection = NULL;

      codec_cap2 = (CodecCap *)(walk2->data);
      caps2 = codec_cap2->caps;
      rtp_caps2 = codec_cap2->rtp_caps;
      //g_debug ("intersecting %s AND %s", gst_caps_to_string (caps1), gst_caps_to_string (caps2));
      intersection = gst_caps_intersect (caps1, caps2);

      if (rtp_caps1 && rtp_caps2) {
        //g_debug ("RTP intersecting %s AND %s", gst_caps_to_string (rtp_caps1), gst_caps_to_string (rtp_caps2));
        rtp_intersection = gst_caps_intersect (rtp_caps1, rtp_caps2);
      }
      /* a match requires a non-empty media intersection and, when both
       * sides have RTP caps, a non-empty RTP intersection as well */
      if (!gst_caps_is_empty (intersection) &&
          (rtp_intersection == NULL || !gst_caps_is_empty (rtp_intersection))) {
        if (item) {
          /* already matched an earlier list2 entry: fold this match's caps
           * and element lists into the existing item.
           * NOTE(review): rtp_intersection is neither stored nor unreffed
           * on this path — looks like a leak, confirm */
          GList *tmplist;

          /* gst_caps_merge() consumes intersection */
          item->caps = gst_caps_merge (item->caps, intersection);

          for (tmplist = g_list_first (codec_cap2->element_list1->data);
              tmplist; tmplist = g_list_next (tmplist)) {
            if (g_list_index (item->element_list2->data, tmplist->data) < 0) {
              item->element_list2->data = g_list_concat (
                  item->element_list2->data,
                  g_list_copy (codec_cap2->element_list1->data));
              g_list_foreach (codec_cap2->element_list1->data,
                  (GFunc) gst_object_ref, NULL);
            }
          }
        } else {
          item = g_slice_new0 (CodecCap);
          item->caps = intersection;
          if (rtp_caps1 && rtp_caps2) {
            item->rtp_caps = rtp_intersection;
          } else if (rtp_caps1) {
            item->rtp_caps = rtp_caps1;
            gst_caps_ref (rtp_caps1);
          } else if (rtp_caps2) {
            item->rtp_caps = rtp_caps2;
            gst_caps_ref (rtp_caps2);
          }

          /* during an intersect, we concat/copy previous lists together and put them
           * into 1 and 2 */
          item->element_list1 = g_list_concat (
              copy_element_list (codec_cap1->element_list1),
              copy_element_list (codec_cap1->element_list2));
          item->element_list2 = g_list_concat (
              copy_element_list (codec_cap2->element_list1),
              copy_element_list (codec_cap2->element_list2));

          intersection_list = g_list_append (intersection_list, item);
          /* an RTP-level match is considered unique: stop scanning list2 */
          if (rtp_intersection) {
            break;
          }
        }
      } else {
        if (rtp_intersection)
          gst_caps_unref (rtp_intersection);
        gst_caps_unref (intersection);
      }
    }
  }

  return intersection_list;
}
/* getcaps for the playsink convert bin (filtered variant): queries the
 * opposite pad's peer with a downstream filter extended by the converter
 * caps, then extends raw peer caps with the converter caps minus any
 * ANY-capsfeatures entries. */
static GstCaps *
gst_play_sink_convert_bin_getcaps (GstPad * pad, GstCaps * filter)
{
  GstPlaySinkConvertBin *self =
      GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer;

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  if (pad == self->srcpad) {
    otherpad = self->sinkpad;
  } else if (pad == self->sinkpad) {
    otherpad = self->srcpad;
  } else {
    GST_ERROR_OBJECT (pad, "Not one of our pads");
    otherpad = NULL;
  }

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    if (peer) {
      GstCaps *peer_caps;
      GstCaps *downstream_filter = NULL;

      /* Add all the caps that we can convert to to the filter caps,
       * otherwise downstream might just return EMPTY caps because
       * it doesn't handle the filter caps but we could still convert
       * to these caps */
      if (filter) {
        guint i, n;

        downstream_filter = gst_caps_new_empty ();

        /* Intersect raw video caps in the filter caps with the converter
         * caps. This makes sure that we don't accept raw video that we
         * can't handle, e.g. because of caps features */
        n = gst_caps_get_size (filter);
        for (i = 0; i < n; i++) {
          GstStructure *s;
          GstCaps *tmp, *tmp2;

          s = gst_structure_copy (gst_caps_get_structure (filter, i));
          if (gst_structure_has_name (s,
                  self->audio ? "audio/x-raw" : "video/x-raw")) {
            tmp = gst_caps_new_full (s, NULL);
            tmp2 = gst_caps_intersect (tmp, self->converter_caps);
            gst_caps_append (downstream_filter, tmp2);
            gst_caps_unref (tmp);
          } else {
            gst_caps_append_structure (downstream_filter, s);
          }
        }
        downstream_filter = gst_caps_merge (downstream_filter,
            gst_caps_ref (self->converter_caps));
      }

      peer_caps = gst_pad_query_caps (peer, downstream_filter);
      if (downstream_filter)
        gst_caps_unref (downstream_filter);
      gst_object_unref (peer);

      if (self->converter_caps && is_raw_caps (peer_caps, self->audio)) {
        GstCaps *converter_caps = gst_caps_ref (self->converter_caps);
        GstCapsFeatures *cf;
        GstStructure *s;
        guint i, n;

        ret = gst_caps_make_writable (peer_caps);

        /* Filter out ANY capsfeatures from the converter caps. We can't
         * convert to ANY capsfeatures, they are only there so that we
         * can passthrough whatever downstream can support... but we
         * definitely don't want to return them here */
        n = gst_caps_get_size (converter_caps);
        for (i = 0; i < n; i++) {
          s = gst_caps_get_structure (converter_caps, i);
          cf = gst_caps_get_features (converter_caps, i);

          if (cf && gst_caps_features_is_any (cf))
            continue;
          ret = gst_caps_merge_structure_full (ret, gst_structure_copy (s),
              (cf ? gst_caps_features_copy (cf) : NULL));
        }
        gst_caps_unref (converter_caps);
      } else {
        ret = peer_caps;
      }
    } else {
      ret = gst_caps_ref (self->converter_caps);
    }
    GST_PLAY_SINK_CONVERT_BIN_FILTER_CAPS (filter, ret);
  } else {
    ret = filter ? gst_caps_ref (filter) : gst_caps_new_any ();
  }
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;
}
/* Merges @caps2 into this caps object.  gst_caps_merge() assumes ownership
 * of its second argument, hence the gst_caps_copy() of the shared pointer.
 * NOTE(review): in the GStreamer 1.0 API gst_caps_merge() returns the merged
 * caps instead of appending into the first argument in place — confirm this
 * binding targets the 0.10 API, where the in-place call is correct. */
void Caps::merge(const CapsPtr & caps2)
{
    gst_caps_merge(object<GstCaps>(), gst_caps_copy(caps2));
}
/* getcaps for the playsink convert bin: queries the opposite pad's peer with
 * the converter caps merged into the filter, extends raw peer caps with the
 * converter caps, then re-applies @filter to the final result. */
static GstCaps *
gst_play_sink_convert_bin_getcaps (GstPad * pad, GstCaps * filter)
{
  GstPlaySinkConvertBin *self =
      GST_PLAY_SINK_CONVERT_BIN (gst_pad_get_parent (pad));
  GstCaps *ret;
  GstPad *otherpad, *peer;

  GST_PLAY_SINK_CONVERT_BIN_LOCK (self);
  if (pad == self->srcpad) {
    otherpad = self->sinkpad;
  } else if (pad == self->sinkpad) {
    otherpad = self->srcpad;
  } else {
    GST_ERROR_OBJECT (pad, "Not one of our pads");
    otherpad = NULL;
  }

  if (otherpad) {
    peer = gst_pad_get_peer (otherpad);
    if (peer) {
      GstCaps *peer_caps;
      GstCaps *downstream_filter = NULL;

      /* Add all the caps that we can convert to to the filter caps,
       * otherwise downstream might just return EMPTY caps because
       * it doesn't handle the filter caps but we could still convert
       * to these caps */
      if (filter) {
        downstream_filter = gst_caps_copy (filter);
        downstream_filter = gst_caps_merge (downstream_filter,
            gst_caps_ref (self->converter_caps));
      }

      peer_caps = gst_pad_query_caps (peer, downstream_filter);
      if (downstream_filter)
        gst_caps_unref (downstream_filter);
      gst_object_unref (peer);

      if (self->converter_caps && is_raw_caps (peer_caps, self->audio)) {
        /* raw caps: we can additionally convert to converter_caps */
        ret = gst_caps_merge (peer_caps, gst_caps_ref (self->converter_caps));
      } else {
        ret = peer_caps;
      }
    } else {
      ret = gst_caps_ref (self->converter_caps);
    }
  } else {
    ret = gst_caps_new_any ();
  }
  GST_PLAY_SINK_CONVERT_BIN_UNLOCK (self);

  gst_object_unref (self);

  if (filter) {
    /* the merge above may have added caps outside @filter: clip the result */
    GstCaps *intersection =
        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);

    gst_caps_unref (ret);
    ret = intersection;
  }

  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);

  return ret;
}