/* Returns a copy of the given simple caps with all format-specific
 * fields stripped, expanded to cover YUV, RGB and gray raw video. */
static GstCaps *
gst_csp_caps_remove_format_info (GstCaps * caps)
{
  GstStructure *yuv, *rgb, *gray;

  /* The input is simple caps, so there is exactly one structure. */
  caps = gst_caps_copy (caps);

  yuv = gst_caps_get_structure (caps, 0);
  gst_structure_set_name (yuv, "video/x-raw-yuv");
  gst_structure_remove_fields (yuv, "format", "endianness", "depth",
      "bpp", "red_mask", "green_mask", "blue_mask", "alpha_mask",
      "palette_data", "color-matrix", NULL);

  /* RGB variant: also drop the YUV-only fields */
  rgb = gst_structure_copy (yuv);
  gst_structure_set_name (rgb, "video/x-raw-rgb");
  gst_structure_remove_fields (rgb, "color-matrix", "chroma-site", NULL);

  /* Gray variant is identical to RGB apart from the media type */
  gray = gst_structure_copy (rgb);
  gst_structure_set_name (gray, "video/x-raw-gray");

  gst_caps_append_structure (caps, rgb);
  gst_caps_append_structure (caps, gray);

  return caps;
}
/* Builds a new caps where format/colorimetry/chroma-site (system memory
 * only) and width/height are stripped from every structure of @caps,
 * skipping structures that are already covered by earlier ones. */
static GstCaps *
gst_gl_mixer_caps_remove_format_info (GstCaps * caps)
{
  GstCaps *out;
  gint idx, num;

  out = gst_caps_new_empty ();
  num = gst_caps_get_size (caps);

  for (idx = 0; idx < num; idx++) {
    GstStructure *s = gst_caps_get_structure (caps, idx);
    GstCapsFeatures *feat = gst_caps_get_features (caps, idx);

    /* If this is already expressed by the accumulated caps,
     * skip this structure */
    if (idx > 0 && gst_caps_is_subset_structure_full (out, s, feat))
      continue;

    s = gst_structure_copy (s);

    /* Only remove format info for the cases when we can actually convert */
    if (!gst_caps_features_is_any (feat)
        && gst_caps_features_is_equal (feat,
            GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY))
      gst_structure_remove_fields (s, "format", "colorimetry", "chroma-site",
          NULL);
    gst_structure_remove_fields (s, "width", "height", NULL);

    gst_caps_append_structure_full (out, s, gst_caps_features_copy (feat));
  }

  return out;
}
/* Returns a new caps with format, colorimetry and chroma-site removed
 * from every structure of @caps, deduplicating along the way. */
static GstCaps *
gst_yuv_to_rgb_caps_remove_format_info (GstCaps * caps)
{
  GstCaps *stripped;
  gint idx, count;

  stripped = gst_caps_new_empty ();
  count = gst_caps_get_size (caps);

  for (idx = 0; idx < count; idx++) {
    GstStructure *s = gst_caps_get_structure (caps, idx);

    /* Skip structures already expressed by what we collected so far */
    if (idx > 0 && gst_caps_is_subset_structure (stripped, s))
      continue;

    s = gst_structure_copy (s);
    gst_structure_remove_fields (s, "format", "colorimetry", "chroma-site",
        NULL);
    gst_caps_append_structure (stripped, s);
  }

  return stripped;
}
/* Computes the caps for @pad by probing the peer of the opposite pad,
 * rewriting its structures to the matching media type and filtering the
 * result against our pad template. */
static GstCaps *
mulawenc_getcaps (GstPad * pad)
{
  GstMuLawEnc *mulawenc;
  GstPad *peerpad;
  GstCaps *peercaps, *result;
  const GstCaps *templ;
  const gchar *media_type;
  gint idx;

  mulawenc = GST_MULAWENC (GST_PAD_PARENT (pad));

  /* Decide which media type we return and which pad to probe */
  if (pad == mulawenc->srcpad) {
    media_type = "audio/x-mulaw";
    peerpad = mulawenc->sinkpad;
  } else {
    media_type = "audio/x-raw-int";
    peerpad = mulawenc->srcpad;
  }

  /* May be NULL when there is no peer */
  peercaps = gst_pad_peer_get_caps (peerpad);
  /* Template caps guarantee we return something acceptable */
  templ = gst_pad_get_pad_template_caps (pad);

  if (peercaps == NULL) {
    /* no peer: just hand back the template caps */
    result = gst_caps_copy (templ);
  } else {
    peercaps = gst_caps_make_writable (peercaps);

    for (idx = 0; idx < gst_caps_get_size (peercaps); idx++) {
      GstStructure *s = gst_caps_get_structure (peercaps, idx);

      gst_structure_set_name (s, media_type);

      if (pad == mulawenc->srcpad) {
        /* strip the raw-audio fields that make no sense on mulaw caps */
        gst_structure_remove_fields (s,
            "width", "depth", "endianness", "signed", NULL);
      } else {
        /* pin the raw format mulaw operates on */
        gst_structure_set (s,
            "width", G_TYPE_INT, 16,
            "depth", G_TYPE_INT, 16,
            "endianness", G_TYPE_INT, G_BYTE_ORDER,
            "signed", G_TYPE_BOOLEAN, TRUE, NULL);
      }
    }

    /* filter against the allowed caps of the pad */
    result = gst_caps_intersect (peercaps, templ);
    gst_caps_unref (peercaps);
  }

  return result;
}
/* Pad probe watching for CAPS events on the agnosticbin sink pad.
 * Stores a copy of the new caps (under the element lock) and, when the
 * caps changed incompatibly with the previous non-raw caps, triggers a
 * reconfiguration of the input chain. Always lets the event pass. */
static GstPadProbeReturn
kms_agnostic_bin2_sink_caps_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  KmsAgnosticBin2 *self;
  GstCaps *current_caps;
  GstCaps *new_caps = NULL;
  GstEvent *event = gst_pad_probe_info_get_event (info);

  /* Only CAPS events are of interest; pass everything else through */
  if (GST_EVENT_TYPE (event) != GST_EVENT_CAPS) {
    return GST_PAD_PROBE_OK;
  }

  GST_TRACE_OBJECT (pad, "Event: %" GST_PTR_FORMAT, event);

  self = KMS_AGNOSTIC_BIN2 (user_data);

  gst_event_parse_caps (event, &new_caps);

  if (new_caps == NULL) {
    GST_ERROR_OBJECT (self, "Unexpected NULL caps");
    return GST_PAD_PROBE_OK;
  }

  /* Swap the stored input caps under the lock; ownership of the old
   * caps transfers to current_caps for the comparison below */
  KMS_AGNOSTIC_BIN2_LOCK (self);
  current_caps = self->priv->input_caps;
  self->priv->input_caps = gst_caps_copy (new_caps);
  KMS_AGNOSTIC_BIN2_UNLOCK (self);

  GST_TRACE_OBJECT (user_data, "New caps event: %" GST_PTR_FORMAT, event);

  if (current_caps != NULL) {
    GstStructure *st;

    GST_TRACE_OBJECT (user_data, "Current caps: %" GST_PTR_FORMAT,
        current_caps);

    st = gst_caps_get_structure (current_caps, 0);

    // Remove framerate, width, height, streamheader that make unnecessary
    // agnostic reconstruction happen
    gst_structure_remove_fields (st, "width", "height", "framerate",
        "streamheader", "codec_data", NULL);

    /* Only rebuild the input chain for incompatible *encoded* caps;
     * raw caps changes are handled elsewhere */
    if (!gst_caps_can_intersect (new_caps, current_caps) &&
        !is_raw_caps (current_caps) && !is_raw_caps (new_caps)) {
      GST_DEBUG_OBJECT (user_data, "Caps differ caps: %" GST_PTR_FORMAT,
          new_caps);
      kms_agnostic_bin2_configure_input (self, new_caps);
    }

    gst_caps_unref (current_caps);
  } else {
    GST_DEBUG_OBJECT (user_data, "No previous caps, starting");
    kms_agnostic_bin2_configure_input (self, new_caps);
  }

  return GST_PAD_PROBE_OK;
}
/* Transform-caps vfunc: produces caps the opposite pad could accept by
 * widening width/height (and pixel-aspect-ratio, if present) to ranges
 * and stripping format information, then applies @filter if given. */
static GstCaps* gst_imx_blitter_video_transform_transform_caps(GstBaseTransform *transform, G_GNUC_UNUSED GstPadDirection direction, GstCaps *caps, GstCaps *filter)
{
	GstCaps *unfiltered, *result;
	GstStructure *s;
	gint idx, num;

	unfiltered = gst_caps_new_empty();
	num = gst_caps_get_size(caps);

	for (idx = 0; idx < num; ++idx)
	{
		s = gst_caps_get_structure(caps, idx);

		/* skip structures already covered by the accumulated caps */
		if ((idx > 0) && gst_caps_is_subset_structure(unfiltered, s))
			continue;

		s = gst_structure_copy(s);

		gst_structure_set(
			s,
			"width", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			"height", GST_TYPE_INT_RANGE, 64, G_MAXINT,
			NULL
		);

		/* colorimetry is not supported by the videotransform element */
		gst_structure_remove_fields(s, "format", "colorimetry", "chroma-site", NULL);

		/* if pixel aspect ratio, make a range of it */
		if (gst_structure_has_field(s, "pixel-aspect-ratio"))
		{
			gst_structure_set(
				s,
				"pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE,
				1, G_MAXINT, G_MAXINT, 1,
				NULL
			);
		}

		gst_caps_append_structure(unfiltered, s);
	}

	/* filter the resulting caps if necessary */
	if (filter != NULL)
	{
		result = gst_caps_intersect_full(filter, unfiltered, GST_CAPS_INTERSECT_FIRST);
		gst_caps_unref(unfiltered);
	}
	else
		result = unfiltered;

	GST_DEBUG_OBJECT(transform, "transformed %" GST_PTR_FORMAT " into %" GST_PTR_FORMAT, (gpointer)caps, (gpointer)result);

	return result;
}
/* Handles fixed caps on an SRTP/SRTCP sink pad: creates/updates the
 * decryption session when all crypto fields are present, then forwards
 * the caps — stripped of key material and renamed to plain RTP/RTCP —
 * to the corresponding source pad. */
static gboolean
gst_srtp_dec_sink_setcaps (GstPad * pad, GstObject * parent,
    GstCaps * caps, gboolean is_rtcp)
{
  GstSrtpDec *filter = GST_SRTP_DEC (parent);
  GstStructure *s;
  GstPad *srcpad;
  gboolean have_crypto_fields;
  gboolean res;

  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  s = gst_caps_get_structure (caps, 0);

  have_crypto_fields =
      gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT) &&
      gst_structure_has_field_typed (s, "roc", G_TYPE_UINT) &&
      gst_structure_has_field_typed (s, "srtp-cipher", G_TYPE_STRING) &&
      gst_structure_has_field_typed (s, "srtp-auth", G_TYPE_STRING) &&
      gst_structure_has_field_typed (s, "srtcp-cipher", G_TYPE_STRING) &&
      gst_structure_has_field_typed (s, "srtcp-auth", G_TYPE_STRING);

  if (have_crypto_fields) {
    guint ssrc;

    gst_structure_get_uint (s, "ssrc", &ssrc);

    if (!update_session_stream_from_caps (filter, ssrc, caps)) {
      GST_WARNING_OBJECT (pad,
          "Could not create session from pad caps: %" GST_PTR_FORMAT, caps);
      return FALSE;
    }
  }

  /* Work on a copy: never leak key material downstream */
  caps = gst_caps_copy (caps);
  s = gst_caps_get_structure (caps, 0);
  gst_structure_remove_fields (s, "srtp-key", "srtp-cipher", "srtp-auth",
      "srtcp-cipher", "srtcp-auth", NULL);

  gst_structure_set_name (s,
      is_rtcp ? "application/x-rtcp" : "application/x-rtp");

  srcpad = gst_pad_get_element_private (pad);
  res = gst_pad_set_caps (srcpad, caps);
  gst_caps_unref (caps);

  return res;
}
void ges_base_xml_formatter_add_track (GESBaseXmlFormatter * self, GESTrackType track_type, GstCaps * caps, const gchar * id, GstStructure * properties, const gchar * metadatas, GError ** error) { GESTrack *track; GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self); if (priv->check_only) { if (caps) gst_caps_unref (caps); return; } track = ges_track_new (track_type, caps); ges_timeline_add_track (GES_FORMATTER (self)->timeline, track); if (properties) { gchar *restriction; GstCaps *caps; gst_structure_get (properties, "restriction-caps", G_TYPE_STRING, &restriction, NULL); gst_structure_remove_fields (properties, "restriction-caps", "caps", "message-forward", NULL); if (g_strcmp0 (restriction, "NULL")) { caps = gst_caps_from_string (restriction); ges_track_set_restriction_caps (track, caps); } gst_structure_foreach (properties, (GstStructureForeachFunc) set_property_foreach, track); } g_hash_table_insert (priv->tracks, g_strdup (id), gst_object_ref (track)); if (metadatas) ges_meta_container_add_metas_from_string (GES_META_CONTAINER (track), metadatas); }
/* Renegotiates the source pad caps of the VA-API decoder: picks the
 * preferred caps feature/format from downstream, sets the output state,
 * attaches caps features, derives separate allocation caps when the
 * coded size differs from the display size, and updates the decoder
 * latency from the (possibly defaulted) framerate.
 * Returns FALSE when no input state exists or negotiation fails. */
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstPad *const srcpad = GST_VIDEO_DECODER_SRC_PAD (vdec);
  GstCaps *allowed;
  GstVideoCodecState *state, *ref_state;
  GstVaapiCapsFeature feature;
  GstCapsFeatures *features;
  GstCaps *allocation_caps;
  GstVideoInfo *vi;
  GstVideoFormat format;
  GstClockTime latency;
  gint fps_d, fps_n;
  guint width, height;
  const gchar *format_str, *feature_str;

  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  /* Ask downstream which caps feature (and possibly format) it prefers */
  format = GST_VIDEO_INFO_FORMAT (&decode->decoded_info);
  allowed = gst_vaapidecode_get_allowed_srcpad_caps (decode);
  feature = gst_vaapi_find_preferred_caps_feature (srcpad, allowed, &format);
  gst_caps_unref (allowed);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

#if (!USE_GLX && !USE_EGL)
  /* This is a very pathological situation. Should not happen. */
  if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
    return FALSE;
#endif

  if ((feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY
          || feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
      && format != GST_VIDEO_INFO_FORMAT (&decode->decoded_info)) {
    GST_FIXME_OBJECT (decode, "validate if driver can convert from %s to %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
            (&decode->decoded_info)), gst_video_format_to_string (format));
  }

  /* Prefer the display size; fall back to the upstream-signalled size */
  width = decode->display_width;
  height = decode->display_height;

  if (!width || !height) {
    width = GST_VIDEO_INFO_WIDTH (&ref_state->info);
    height = GST_VIDEO_INFO_HEIGHT (&ref_state->info);
  }

  state = gst_video_decoder_set_output_state (vdec, format, width, height,
      ref_state);
  if (!state)
    return FALSE;

  if (GST_VIDEO_INFO_WIDTH (&state->info) == 0
      || GST_VIDEO_INFO_HEIGHT (&state->info) == 0) {
    gst_video_codec_state_unref (state);
    return FALSE;
  }

  vi = &state->info;
  state->caps = gst_video_info_to_caps (vi);

  switch (feature) {
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:{
      GstStructure *structure = gst_caps_get_structure (state->caps, 0);

      /* Remove chroma-site and colorimetry from src caps,
       * which is unnecessary on downstream if using VASurface */
      gst_structure_remove_fields (structure, "chroma-site", "colorimetry",
          NULL);

      feature_str = gst_vaapi_caps_feature_to_string (feature);
      features = gst_caps_features_new (feature_str, NULL);
      gst_caps_set_features (state->caps, 0, features);
      break;
    }
    default:
      break;
  }

  /* Allocation query is different from pad's caps: when the coded size
   * differs from the display size, buffers must be allocated at the
   * coded size */
  allocation_caps = NULL;
  if (GST_VIDEO_INFO_WIDTH (&decode->decoded_info) != width
      || GST_VIDEO_INFO_HEIGHT (&decode->decoded_info) != height) {
    allocation_caps = gst_caps_copy (state->caps);
    format_str = gst_video_format_to_string (format);
    gst_caps_set_simple (allocation_caps,
        "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (&decode->decoded_info),
        "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (&decode->decoded_info),
        "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (decode, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
  }
  gst_caps_replace (&state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  gst_video_codec_state_unref (state);

  fps_n = GST_VIDEO_INFO_FPS_N (vi);
  fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
/* Deserializes a <clip> element. Looks up the target layer, requests the
 * asset for @asset_id and either creates the clip immediately or — when
 * the asset is not yet available — queues a PendingClip to be resolved
 * once the asset loads. On malformed input, sets @error and returns. */
void
ges_base_xml_formatter_add_clip (GESBaseXmlFormatter * self,
    const gchar * id, const char *asset_id, GType type, GstClockTime start,
    GstClockTime inpoint, GstClockTime duration, guint layer_prio,
    GESTrackType track_types, GstStructure * properties,
    const gchar * metadatas, GError ** error)
{
  GESAsset *asset;
  GESClip *nclip;
  LayerEntry *entry;
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);

  if (priv->check_only)
    return;

  entry = g_hash_table_lookup (priv->layers, GINT_TO_POINTER (layer_prio));
  if (entry == NULL) {
    g_set_error (error, GES_ERROR, GES_ERROR_FORMATTER_MALFORMED_INPUT_FILE,
        "We got a Clip in a layer"
        " that does not exist, something is wrong either in the project file or"
        " in %s", g_type_name (G_OBJECT_TYPE (self)));
    return;
  }

  /* We do not want the properties that are passed to layer-add_asset to be reset */
  if (properties)
    gst_structure_remove_fields (properties,
        "supported-formats", "inpoint", "start", "duration", NULL);

  asset = ges_asset_request (type, asset_id, NULL);
  if (asset == NULL) {
    /* Asset not loaded yet: validate the id and queue a pending clip */
    gchar *real_id;
    PendingClip *pclip;
    GList *pendings;

    real_id = ges_extractable_type_check_id (type, asset_id, error);
    if (real_id == NULL) {
      if (*error == NULL)
        g_set_error (error, G_MARKUP_ERROR, G_MARKUP_ERROR_INVALID_CONTENT,
            "Object type '%s' with Asset id: %s not be created'",
            g_type_name (type), asset_id);
      return;
    }

    pendings = g_hash_table_lookup (priv->assetid_pendingclips, asset_id);

    pclip = g_slice_new0 (PendingClip);
    GST_DEBUG_OBJECT (self, "Adding pending %p for %s, currently: %i",
        pclip, asset_id, g_list_length (pendings));

    /* Snapshot every piece of clip state we will need when the asset
     * finally becomes available */
    pclip->id = g_strdup (id);
    pclip->track_types = track_types;
    pclip->duration = duration;
    pclip->inpoint = inpoint;
    pclip->start = start;
    pclip->layer = gst_object_ref (entry->layer);
    pclip->properties = properties ? gst_structure_copy (properties) : NULL;
    pclip->metadatas = g_strdup (metadatas);

    /* Add the new pending object to the hashtable (real_id ownership
     * transfers to the table key) */
    g_hash_table_insert (priv->assetid_pendingclips, real_id,
        g_list_append (pendings, pclip));
    g_hash_table_insert (priv->clipid_pendings, g_strdup (id), pclip);

    priv->current_clip = NULL;
    priv->current_pending_clip = pclip;

    return;
  }

  nclip = _add_object_to_layer (priv, id, entry->layer, asset,
      start, inpoint, duration, track_types, metadatas, properties);

  if (!nclip)
    return;

  priv->current_clip = nclip;
}
/* Sink pad query handler for srtpdec. For CAPS queries it translates the
 * filter to plain RTP/RTCP (with crypto fields stripped), queries the
 * peer of the matching source pad, intersects with its template, and
 * renames the result back to SRTP/SRTCP before answering. All other
 * queries are handled by the default implementation. */
static gboolean
gst_srtp_dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query,
    gboolean is_rtcp)
{
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:
    {
      GstCaps *filter = NULL;
      GstCaps *other_filter = NULL;
      GstCaps *template_caps;
      GstPad *otherpad;
      GstCaps *other_caps;
      GstCaps *ret;
      int i;

      gst_query_parse_caps (query, &filter);

      otherpad = (GstPad *) gst_pad_get_element_private (pad);

      if (filter) {
        /* Translate the filter into what the source pad peer speaks:
         * plain RTP/RTCP without any key material */
        other_filter = gst_caps_copy (filter);

        for (i = 0; i < gst_caps_get_size (other_filter); i++) {
          GstStructure *ps = gst_caps_get_structure (other_filter, i);
          if (is_rtcp)
            gst_structure_set_name (ps, "application/x-rtcp");
          else
            gst_structure_set_name (ps, "application/x-rtp");
          gst_structure_remove_fields (ps, "srtp-key", "srtp-cipher",
              "srtp-auth", "srtcp-cipher", "srtcp-auth", NULL);
        }
      }

      other_caps = gst_pad_peer_query_caps (otherpad, other_filter);
      if (other_filter)
        gst_caps_unref (other_filter);
      if (!other_caps) {
        goto return_template;
      }

      template_caps = gst_pad_get_pad_template_caps (otherpad);
      ret = gst_caps_intersect_full (other_caps, template_caps,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (other_caps);
      gst_caps_unref (template_caps);

      ret = gst_caps_make_writable (ret);

      /* Rename the peer's answer back to the encrypted media types */
      for (i = 0; i < gst_caps_get_size (ret); i++) {
        GstStructure *ps = gst_caps_get_structure (ret, i);
        if (is_rtcp)
          gst_structure_set_name (ps, "application/x-srtcp");
        else
          gst_structure_set_name (ps, "application/x-srtp");
      }

      if (filter) {
        GstCaps *tmp;

        tmp = gst_caps_intersect (ret, filter);
        gst_caps_unref (ret);
        ret = tmp;
      }

      gst_query_set_caps_result (query, ret);
      gst_caps_unref (ret);

      return TRUE;

    return_template:
      /* No peer answered: fall back to our own template caps */
      ret = gst_pad_get_pad_template_caps (pad);
      gst_query_set_caps_result (query, ret);
      gst_caps_unref (ret);
      return TRUE;
    }
    default:
      return gst_pad_query_default (pad, parent, query);
  }
}
/* Builds and caches the caps actually supported by the selected Core
 * Audio output device: queries SPDIF availability and the preferred
 * channel layout, maps Core Audio channel labels to GStreamer positions,
 * and narrows the sink template caps accordingly. Always returns TRUE. */
static gboolean
gst_osx_audio_sink_allowed_caps (GstOsxAudioSink * osxsink)
{
  gint i, channels;
  gboolean spdif_allowed;
  AudioChannelLayout *layout;
  GstElementClass *element_class;
  GstPadTemplate *pad_template;
  GstCaps *caps, *in_caps;
  guint64 channel_mask = 0;
  GstAudioChannelPosition *pos = osxsink->channel_positions;

  /* First collect info about the HW capabilites and preferences */
  spdif_allowed =
      gst_core_audio_audio_device_is_spdif_avail (osxsink->device_id);
  layout = gst_core_audio_audio_device_get_channel_layout (osxsink->device_id);

  GST_DEBUG_OBJECT (osxsink, "Selected device ID: %u SPDIF allowed: %d",
      (unsigned) osxsink->device_id, spdif_allowed);

  if (layout) {
    channels = MIN (layout->mNumberChannelDescriptions,
        GST_OSX_AUDIO_MAX_CHANNEL);
  } else {
    GST_WARNING_OBJECT (osxsink, "This driver does not support "
        "kAudioDevicePropertyPreferredChannelLayout.");
    channels = 2;
  }

  switch (channels) {
    case 0:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_NONE;
      break;
    case 1:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
      break;
    case 2:
      pos[0] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
      pos[1] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
      channel_mask |= GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_LEFT);
      channel_mask |= GST_AUDIO_CHANNEL_POSITION_MASK (FRONT_RIGHT);
      break;
    default:
      /* layout is non-NULL here: channels > 2 only happens when a
       * layout was obtained above */
      channels = MIN (layout->mNumberChannelDescriptions,
          GST_OSX_AUDIO_MAX_CHANNEL);
      for (i = 0; i < channels; i++) {
        switch (layout->mChannelDescriptions[i].mChannelLabel) {
          case kAudioChannelLabel_Left:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
            break;
          case kAudioChannelLabel_Right:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
            break;
          case kAudioChannelLabel_Center:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
            break;
          case kAudioChannelLabel_LFEScreen:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_LFE1;
            break;
          case kAudioChannelLabel_LeftSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
            break;
          case kAudioChannelLabel_RightSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
            break;
          case kAudioChannelLabel_RearSurroundLeft:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT;
            break;
          case kAudioChannelLabel_RearSurroundRight:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT;
            break;
          case kAudioChannelLabel_CenterSurround:
            pos[i] = GST_AUDIO_CHANNEL_POSITION_REAR_CENTER;
            break;
          default:
            /* NOTE(review): this only breaks out of the inner switch, so
             * the loop keeps running with channels reset to 2 — looks
             * intentional as a stereo fallback, but confirm */
            GST_WARNING_OBJECT (osxsink, "unrecognized channel: %d",
                (int) layout->mChannelDescriptions[i].mChannelLabel);
            channel_mask = 0;
            channels = 2;
            break;
        }
      }
  }
  g_free (layout);

  /* Recover the template caps */
  element_class = GST_ELEMENT_GET_CLASS (osxsink);
  pad_template = gst_element_class_get_pad_template (element_class, "sink");
  in_caps = gst_pad_template_get_caps (pad_template);

  /* Create the allowed subset */
  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (in_caps); i++) {
    GstStructure *in_s, *out_s;

    in_s = gst_caps_get_structure (in_caps, i);

    /* Pass-through (IEC 61937) formats only when SPDIF is available */
    if (gst_structure_has_name (in_s, "audio/x-ac3") ||
        gst_structure_has_name (in_s, "audio/x-dts")) {
      if (spdif_allowed) {
        gst_caps_append_structure (caps, gst_structure_copy (in_s));
      }
    }
    gst_audio_channel_positions_to_mask (pos, channels, false, &channel_mask);
    out_s = gst_structure_copy (in_s);
    gst_structure_remove_fields (out_s, "channels", "channel-mask", NULL);
    gst_structure_set (out_s, "channels", G_TYPE_INT, channels,
        "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL);
    gst_caps_append_structure (caps, out_s);
  }
  /* FIX: gst_pad_template_get_caps() returns a reference (transfer full)
   * that was leaked on every call */
  gst_caps_unref (in_caps);

  if (osxsink->cached_caps) {
    gst_caps_unref (osxsink->cached_caps);
  }

  osxsink->cached_caps = caps;
  osxsink->channels = channels;

  return TRUE;
}
/* (GStreamer 0.10) Computes the caps for the shapewipe source pad by
 * intersecting the video sink peer caps, our own peer caps and the mask
 * sink peer caps (the latter rewritten to video/x-raw-yuv). */
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;

  /* FIX: the early returns leaked the reference acquired by
   * gst_pad_get_parent() above */
  if (GST_PAD_CAPS (pad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (pad));
    gst_object_unref (self);
    return ret;
  } else if (GST_PAD_CAPS (self->video_sinkpad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (self->video_sinkpad));
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  if (self->height && self->width) {
    guint i, n;

    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->width,
          "height", G_TYPE_INT, self->height, NULL);
    }
  }

  tmp = gst_pad_peer_get_caps (self->mask_sinkpad);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp = gst_caps_make_writable (tmp);

    tmp2 = gst_caps_copy (gst_pad_get_pad_template_caps (self->mask_sinkpad));

    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);
    tmp = intersection;

    n = gst_caps_get_size (tmp);

    tmp2 = gst_caps_new_empty ();
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);
      GstStructure *c;

      /* Rewrite the mask caps as raw YUV without RGB-specific fields */
      gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
          "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
          NULL);
      gst_structure_set_name (s, "video/x-raw-yuv");
      c = gst_structure_copy (s);

      gst_caps_append_structure (tmp2, c);
    }
    gst_caps_append (tmp, tmp2);

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* (GStreamer 0.10) Computes the caps for the shapewipe mask sink pad:
 * intersects video sink and src peer caps, rewrites every structure to
 * video/x-raw-gray in both 16-bit and 8-bit variants, then intersects
 * with the mask peer caps. */
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  /* FIX: the early return leaked the reference acquired by
   * gst_pad_get_parent() above */
  if (GST_PAD_CAPS (pad)) {
    ret = gst_caps_copy (GST_PAD_CAPS (pad));
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_get_caps (self->video_sinkpad);
  if (tmp) {
    ret =
        gst_caps_intersect (tmp,
        gst_pad_get_pad_template_caps (self->video_sinkpad));
    gst_caps_unref (tmp);
  } else {
    ret = gst_caps_copy (gst_pad_get_pad_template_caps (self->video_sinkpad));
  }

  tmp = gst_pad_peer_get_caps (self->srcpad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw-gray");
    /* FIX: "framerate" was listed twice in this varargs list */
    gst_structure_remove_fields (s, "format", "framerate", "bpp", "depth",
        "endianness", "red_mask", "green_mask", "blue_mask", "alpha_mask",
        NULL);

    if (self->width && self->height)
      gst_structure_set (s, "width", G_TYPE_INT, self->width,
          "height", G_TYPE_INT, self->height, NULL);

    /* The mask is a single static frame */
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    /* s becomes the 16-bit variant, t the 8-bit variant */
    gst_structure_set (s, "bpp", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16,
        "endianness", G_TYPE_INT, G_BYTE_ORDER, NULL);
    gst_structure_set (t, "bpp", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_get_caps (pad);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  gst_object_unref (self);

  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* (GStreamer 1.0) Computes the caps for the shapewipe mask sink pad:
 * intersects video sink and src peer caps, rewrites every structure to
 * video/x-raw in GRAY16 and GRAY8 variants at framerate 0/1, then
 * intersects with the mask peer caps. @filter is currently unused. */
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  /* FIX: the early return leaked the reference acquired by
   * gst_pad_get_parent() above */
  if (gst_pad_has_current_caps (pad)) {
    ret = gst_pad_get_current_caps (pad);
    gst_object_unref (self);
    return ret;
  }

  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    /* FIX: gst_pad_get_pad_template_caps() returns a reference in 1.0;
     * it was leaked when passed inline to the intersection */
    GstCaps *templ = gst_pad_get_pad_template_caps (self->video_sinkpad);

    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = gst_pad_get_pad_template_caps (self->video_sinkpad);
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);
  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
  GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  /* FIX: ret may still be (a shared reference to) the template caps;
   * make it writable before mutating its structures below */
  ret = gst_caps_make_writable (ret);

  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw");
    gst_structure_remove_fields (s, "format", "framerate", NULL);

    if (self->vinfo.width && self->vinfo.height)
      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width,
          "height", G_TYPE_INT, self->vinfo.height, NULL);

    /* The mask is a single static frame */
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    /* s becomes the GRAY16 variant, t the GRAY8 variant */
    gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16),
        NULL);
    gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);

  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);
  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* (GStreamer 1.0) Computes the caps for the shapewipe source pad by
 * intersecting the video sink peer caps, our own peer caps and the mask
 * sink peer caps (rewritten to video/x-raw without format/framerate).
 * @filter is currently unused. */
static GstCaps *
gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *templ, *ret, *tmp;

  /* FIX: the early returns leaked the reference acquired by
   * gst_pad_get_parent() above */
  if (gst_pad_has_current_caps (pad)) {
    ret = gst_pad_get_current_caps (pad);
    gst_object_unref (self);
    return ret;
  } else if (gst_pad_has_current_caps (self->video_sinkpad)) {
    ret = gst_pad_get_current_caps (self->video_sinkpad);
    gst_object_unref (self);
    return ret;
  }

  templ = gst_pad_get_pad_template_caps (self->video_sinkpad);
  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = templ;
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  tmp = gst_pad_peer_query_caps (pad, NULL);
  /* FIX: this log line printed ret instead of the peer's answer */
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  if (self->vinfo.height && self->vinfo.width) {
    guint i, n;

    ret = gst_caps_make_writable (ret);
    n = gst_caps_get_size (ret);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (ret, i);

      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width,
          "height", G_TYPE_INT, self->vinfo.height, NULL);
    }
  }

  tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
  /* FIX: this log line printed ret instead of the mask peer's answer */
  GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, tmp);
  if (tmp) {
    GstCaps *intersection, *tmp2;
    guint i, n;

    tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);

    intersection = gst_caps_intersect (tmp, tmp2);
    gst_caps_unref (tmp);
    gst_caps_unref (tmp2);

    tmp = gst_caps_make_writable (intersection);

    n = gst_caps_get_size (tmp);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (tmp, i);

      gst_structure_remove_fields (s, "format", "framerate", NULL);
      gst_structure_set_name (s, "video/x-raw");
    }

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);

  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
int main (int argc, char *argv[]) { gst_init (&argc, &argv); GstElementFactory *factory = gst_element_factory_find("ffmpegcolorspace"); const GList *list = gst_element_factory_get_static_pad_templates(factory); while (NULL != list) { GstStaticPadTemplate *templ = (GstStaticPadTemplate *)list->data; // name g_print("+++ template name %s\n", templ->name_template); // direction g_print ("direction: "); switch (templ->direction) { case GST_PAD_UNKNOWN: g_print ("unknown\n"); break; case GST_PAD_SRC: g_print ("src\n"); break; case GST_PAD_SINK: g_print ("sink\n"); break; default: g_print ("this is a bug\n"); break; } // presence g_print ("presence: "); switch (templ->presence) { case GST_PAD_ALWAYS: g_print ("always\n"); break; case GST_PAD_SOMETIMES: g_print ("sometimes\n"); break; case GST_PAD_REQUEST: g_print ("request\n"); break; default: g_print ("this is a bug\n"); break; } // caps GstCaps *caps = gst_static_caps_get(&templ->static_caps); // copying for removing fields in struture GstCaps *copy = gst_caps_copy(caps); gst_caps_unref(caps); guint size = gst_caps_get_size(copy); guint i = 0; g_print("size %u\n", size); for (; i < size; i++) { GstStructure *structure = gst_caps_get_structure(copy, i); gst_structure_remove_fields(structure, "format", "width", "height", "framerate", NULL); GstCaps *copy_nth = gst_caps_copy_nth(copy, i); gchar *caps_str = gst_caps_to_string(copy_nth); g_print(" caps num %u is %s\n", i, caps_str); g_free(caps_str); gst_caps_unref(copy_nth); } gst_caps_unref(copy); list = g_list_next(list); } gst_object_unref(factory); gst_deinit(); // for memory testing return 0; }