/* Requests a key frame by sending a force-key-unit event upstream through
 * @pad.  Does nothing when the pad carries raw (unencoded) media or when no
 * caps can be determined.
 *
 * @pad: the pad to send the event through
 * @all_headers: TRUE to also request all stream headers with the key unit
 */
static void
send_force_key_unit_event (GstPad * pad, gboolean all_headers)
{
  GstEvent *event;
  GstCaps *caps = gst_pad_get_current_caps (pad);

  /* fall back to the allowed caps when nothing is negotiated yet */
  if (caps == NULL) {
    caps = gst_pad_get_allowed_caps (pad);
  }

  if (caps == NULL) {
    return;
  }

  /* raw media has no encoder upstream, so a key-unit request is pointless */
  if (is_raw_caps (caps)) {
    goto end;
  }

  event =
      gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
      all_headers, 0);

  /* the event travels upstream: a src pad hands it to its own element
   * (send), a sink pad pushes it to the upstream peer (push) */
  if (GST_PAD_DIRECTION (pad) == GST_PAD_SRC) {
    gst_pad_send_event (pad, event);
  } else {
    gst_pad_push_event (pad, event);
  }

end:
  gst_caps_unref (caps);
}
/* Checks whether @caps can intersect with the input of @tree_bin's output
 * tee.  Uses the bin's configured input caps when available, otherwise the
 * caps currently allowed on the tee's sink pad.
 *
 * Returns: TRUE if @caps is compatible, FALSE otherwise (including when no
 * caps information is available at all). */
static gboolean
check_bin (KmsTreeBin * tree_bin, const GstCaps * caps)
{
  gboolean ret = FALSE;
  GstElement *output_tee = kms_tree_bin_get_output_tee (tree_bin);
  GstPad *tee_sink = gst_element_get_static_pad (output_tee, "sink");
  GstCaps *current_caps = kms_tree_bin_get_input_caps (tree_bin);

  if (current_caps == NULL) {
    current_caps = gst_pad_get_allowed_caps (tee_sink);
    GST_TRACE_OBJECT (tree_bin, "Allowed caps are: %" GST_PTR_FORMAT,
        current_caps);
  } else {
    GST_TRACE_OBJECT (tree_bin, "Current caps are: %" GST_PTR_FORMAT,
        current_caps);
  }

  if (current_caps != NULL) {
    //TODO: Remove this when problem in negotiation with features will be
    //resolved
    /* gst_caps_make_writable() consumes the current_caps reference; only
     * the writable copy must be unreffed afterwards */
    GstCaps *caps_without_features = gst_caps_make_writable (current_caps);

    /* strip caps features from the first structure before intersecting */
    gst_caps_set_features (caps_without_features, 0,
        gst_caps_features_new_empty ());
    if (gst_caps_can_intersect (caps, caps_without_features)) {
      ret = TRUE;
    }
    gst_caps_unref (caps_without_features);
  }

  g_object_unref (tee_sink);

  return ret;
}
/// Pad-added callback: wraps the new pad in a DecodebinToShmdata, links it
/// into the pipeline and propagates the "media-label" caps field, if any.
///
/// Fixes: gst_pad_get_allowed_caps() may return nullptr (unlinked pad) or
/// empty caps; the old code dereferenced the first structure unconditionally.
/// Also reuses the fetched media-label string instead of querying the
/// structure a second time.
void HTTPSDPDec::httpsdpdec_pad_added_cb(GstElement* /*object */, GstPad* pad, gpointer user_data) {
  HTTPSDPDec* context = static_cast<HTTPSDPDec*>(user_data);
  std::unique_ptr<DecodebinToShmdata> decodebin = std::make_unique<DecodebinToShmdata>(
      context->gst_pipeline_.get(),
      [context](GstElement* el, const std::string& media_type, const std::string& media_label) {
        context->configure_shmdatasink(el, media_type, media_label);
      },
      context->decompress_streams_);
  if (!decodebin->invoke_with_return<gboolean>([context](GstElement* el) {
        return gst_bin_add(GST_BIN(context->gst_pipeline_->get_pipeline()), el);
      })) {
    g_warning("decodebin cannot be added to pipeline");
  }
  GstPad* sinkpad = decodebin->invoke_with_return<GstPad*>(
      [](GstElement* el) { return gst_element_get_static_pad(el, "sink"); });
  On_scope_exit { gst_object_unref(GST_OBJECT(sinkpad)); };
  GstUtils::check_pad_link_return(gst_pad_link(pad, sinkpad));
  // allowed caps may be nullptr or empty; guard before touching structure 0
  auto caps = gst_pad_get_allowed_caps(pad);
  On_scope_exit {
    if (caps) gst_caps_unref(caps);
  };
  if (caps && !gst_caps_is_empty(caps)) {
    auto structure = gst_caps_get_structure(caps, 0);
    auto media_label = gst_structure_get_string(structure, "media-label");
    // reuse the already-fetched value instead of querying the structure twice
    if (nullptr != media_label) decodebin->set_media_label(media_label);
  }
  decodebin->invoke([](GstElement* el) { GstUtils::sync_state_with_parent(el); });
  context->decodebins_.push_back(std::move(decodebin));
}
/* we return the padtemplate caps with the mode field fixated to a value if we
 * can */
static GstCaps *
gst_rtp_ilbc_pay_sink_getcaps (GstBaseRTPPayload * rtppayload, GstPad * pad)
{
  GstCaps *peer_caps;
  GstCaps *result;

  peer_caps = gst_pad_get_allowed_caps (rtppayload->srcpad);
  result = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

  /* no peer constraints: return the plain template caps */
  if (peer_caps == NULL)
    return result;

  if (!gst_caps_is_empty (peer_caps)) {
    /* parse mode, if we can */
    const gchar *mode_str =
        gst_structure_get_string (gst_caps_get_structure (peer_caps, 0),
        "mode");

    if (mode_str != NULL) {
      gint mode = strtol (mode_str, NULL, 10);

      /* only the two legal iLBC frame durations are propagated */
      if (mode == 20 || mode == 30)
        gst_structure_set (gst_caps_get_structure (result, 0),
            "mode", G_TYPE_INT, mode, NULL);
    }
  }

  gst_caps_unref (peer_caps);

  return result;
}
/* Returns the caps acceptable on the sink pad: the downstream-allowed caps
 * with "framed" and "alignment" stripped (the parser can re-align), or the
 * sink pad template caps when there is no peer.
 *
 * Fix: the template-caps assignment previously ended in a comma operator
 * (`templ = ...,`) instead of a semicolon — harmless here but a latent typo
 * that hides the two statements from the reader. */
static GstCaps *
gst_ac3_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
  GstCaps *peercaps, *templ;
  GstCaps *res;

  /* FIXME: handle filter */
  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));

  if (peercaps) {
    guint i, n;

    /* Remove the framed and alignment field. We can convert
     * between different alignments. */
    peercaps = gst_caps_make_writable (peercaps);
    n = gst_caps_get_size (peercaps);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (peercaps, i);

      gst_structure_remove_field (s, "framed");
      gst_structure_remove_field (s, "alignment");
    }

    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
    gst_caps_unref (templ);
  } else {
    /* ownership of the template ref transfers to the caller */
    res = templ;
  }

  return res;
}
/* Small sanity check: an unlinked (or filter-linked but unnegotiated) pad
 * reports its caps, while gst_pad_get_allowed_caps() returns NULL when the
 * pad has no peer.
 *
 * Fixes: the strings returned by gst_caps_to_string(), the first caps ref,
 * the pad refs from gst_element_get_pad() and the pipeline ref were all
 * leaked. */
gint
main (gint argc, gchar ** argv)
{
  GstCaps *caps;
  GstPad *pad;
  gchar *caps_str;
  GstElement *sink, *identity;
  GstElement *pipeline;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  identity = gst_element_factory_make ("identity", NULL);
  g_assert (identity);
  sink = gst_element_factory_make ("fakesink", NULL);
  g_assert (sink);
  gst_bin_add_many (GST_BIN (pipeline), identity, sink, NULL);
  gst_element_link_filtered (identity, sink,
      gst_caps_new_simple ("audio/x-raw-int", NULL));

  pad = gst_element_get_pad (identity, "sink");

  caps = gst_pad_get_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("caps: %s\n", caps_str);
  g_free (caps_str);
  g_assert (!gst_caps_is_any (caps));
  gst_caps_unref (caps);

  caps = gst_pad_get_allowed_caps (pad);
  caps_str = gst_caps_to_string (caps);
  g_print ("allowed caps: %s\n", caps_str);
  g_free (caps_str);
  /* get_allowed_caps doesn't mean anything if you aren't connected */
  g_assert (!caps);

  gst_object_unref (pad);
  gst_object_unref (pipeline);

  return 0;
}
/* Returns newly fixated caps for the encoder source pad, or NULL when the
 * peer allows nothing usable.  The caller owns the returned caps. */
static GstCaps *sbc_enc_get_fixed_srcpad_caps(GstSbcEnc *enc)
{
	GstCaps *caps;
	gboolean res = TRUE;
	GstCaps *result_caps = NULL;

	/* prefer what the peer allows; fall back to our own getcaps */
	caps = gst_pad_get_allowed_caps(enc->srcpad);
	if (caps == NULL)
		caps = sbc_enc_src_getcaps(enc->srcpad);

	/* NOTE(review): the first comparison is a pointer check against the
	 * GST_CAPS_NONE singleton, not a semantic emptiness test — confirm
	 * that value can actually be returned here */
	if (caps == GST_CAPS_NONE || gst_caps_is_empty(caps)) {
		res = FALSE;
		goto done;
	}

	result_caps = sbc_enc_src_caps_fixate(enc, caps);

done:
	/* caps is owned in all paths reaching here */
	gst_caps_unref(caps);

	if (!res)
		return NULL;

	return result_caps;
}
/* getcaps for the G.722 payloader sink pad: when downstream is usable, pin
 * the template caps to mono/16 kHz; optionally intersect with @filter. */
static GstCaps *
gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *result;
  GstCaps *peer_caps;

  peer_caps = gst_pad_get_allowed_caps (rtppayload->srcpad);
  result = gst_pad_get_pad_template_caps (pad);

  if (peer_caps != NULL) {
    if (!gst_caps_is_empty (peer_caps)) {
      result = gst_caps_make_writable (result);
      gst_caps_set_simple (result, "channels", G_TYPE_INT, 1, NULL);
      gst_caps_set_simple (result, "rate", G_TYPE_INT, 16000, NULL);
    }
    gst_caps_unref (peer_caps);
  }

  if (filter != NULL) {
    GstCaps *intersected;

    GST_DEBUG_OBJECT (rtppayload, "Intersect %" GST_PTR_FORMAT " and filter %"
        GST_PTR_FORMAT, result, filter);
    intersected =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersected;
  }

  return result;
}
/* Negotiates output caps for the DTMF source: takes downstream-allowed caps
 * (or the pad template), fixates the sample rate near the default and
 * records it in dtmfsrc->sample_rate.
 *
 * Fix: the caps reference was leaked on the empty-caps early return. */
static gboolean
gst_dtmf_src_negotiate (GstBaseSrc * basesrc)
{
  GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (basesrc);
  GstCaps *caps;
  GstStructure *s;
  gboolean ret;

  caps = gst_pad_get_allowed_caps (GST_BASE_SRC_PAD (basesrc));

  if (!caps)
    caps =
        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
            (basesrc)));

  if (gst_caps_is_empty (caps)) {
    /* previously leaked the caps ref on this path */
    gst_caps_unref (caps);
    return FALSE;
  }

  gst_caps_truncate (caps);

  s = gst_caps_get_structure (caps, 0);

  gst_structure_fixate_field_nearest_int (s, "rate", DEFAULT_SAMPLE_RATE);
  if (!gst_structure_get_int (s, "rate", &dtmfsrc->sample_rate)) {
    GST_ERROR_OBJECT (dtmfsrc, "Could not get rate");
    gst_caps_unref (caps);
    return FALSE;
  }

  ret = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);

  gst_caps_unref (caps);

  return ret;
}
/* setcaps for the fake parser's sink pad: derives the src pad caps from what
 * downstream allows and defaults stream-format to "avc" if unset.
 *
 * Fix: gst_pad_get_allowed_caps() can return NULL (unlinked src pad) and
 * the caps may be empty; the old code would then crash in
 * gst_caps_make_writable() / gst_structure_get_string().  In that case the
 * src caps are simply left untouched. */
static gboolean
gst_fake_h264_parser_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstElement *self = GST_ELEMENT (gst_pad_get_parent (pad));
  GstPad *otherpad = gst_element_get_static_pad (self, "src");
  GstCaps *accepted_caps;
  GstStructure *s;
  const gchar *stream_format;

  accepted_caps = gst_pad_get_allowed_caps (otherpad);
  if (accepted_caps != NULL && !gst_caps_is_empty (accepted_caps)) {
    accepted_caps = gst_caps_make_writable (accepted_caps);
    gst_caps_truncate (accepted_caps);

    s = gst_caps_get_structure (accepted_caps, 0);
    stream_format = gst_structure_get_string (s, "stream-format");
    if (!stream_format)
      gst_structure_set (s, "stream-format", G_TYPE_STRING, "avc", NULL);

    gst_pad_set_caps (otherpad, accepted_caps);
  }
  if (accepted_caps != NULL)
    gst_caps_unref (accepted_caps);

  gst_object_unref (otherpad);
  gst_object_unref (self);

  return TRUE;
}
/* Configures the video source pad caps from the downstream-allowed caps,
 * copying the framerate from the incoming sink caps.
 *
 * Fixes: NULL check for gst_pad_get_allowed_caps() (crash in
 * gst_caps_copy() when the pad is unlinked); outcaps reference leak after
 * gst_pad_set_caps(); num/den used uninitialized when the sink caps carry
 * no framerate; removed unused locals. */
static gboolean
gst_merger_setcaps_srcv (GstMerger * merger, GstCaps * caps)
{
  gboolean ret;
  GstCaps *outcaps;
  GstCaps *othercaps;

  GST_DEBUG_OBJECT (merger, "the caps %" GST_PTR_FORMAT, caps);

  othercaps = gst_pad_get_allowed_caps (merger->srcv_pad);
  GST_DEBUG_OBJECT (merger, "other caps %" GST_PTR_FORMAT, othercaps);
  if (othercaps == NULL)
    return FALSE;

  outcaps = gst_caps_copy (othercaps);
  gst_caps_unref (othercaps);

  // use framerate from source
  GstStructure *s_snk = gst_caps_get_structure (caps, 0);
  GstStructure *s_src = gst_caps_get_structure (outcaps, 0);
  gint num = 0, den = 1;        /* sane fallback when no framerate present */

  gst_structure_get_fraction (s_snk, "framerate", &num, &den);
  gst_structure_set (s_src, "framerate", GST_TYPE_FRACTION, num, den, NULL);

  ret = gst_pad_set_caps (merger->srcv_pad, outcaps);
  GST_DEBUG_OBJECT (merger, "srcv caps %" GST_PTR_FORMAT, outcaps);
  gst_caps_unref (outcaps);     /* was leaked before */

  gst_pad_use_fixed_caps (merger->srcv_pad);

  return ret;
}
/* Checks whether @caps can intersect with the input of @tree_bin's output
 * tee, using the bin's configured input caps or, failing that, the caps
 * allowed on the tee's sink pad.
 *
 * Returns: TRUE if compatible, FALSE otherwise. */
static gboolean
check_bin (KmsTreeBin * tree_bin, const GstCaps * caps)
{
  gboolean ret = FALSE;
  GstElement *output_tee = kms_tree_bin_get_output_tee (tree_bin);
  GstPad *tee_sink = gst_element_get_static_pad (output_tee, "sink");
  GstCaps *current_caps = kms_tree_bin_get_input_caps (tree_bin);

  if (current_caps == NULL) {
    /* gst_pad_get_allowed_caps() returns a new ref (or NULL) */
    current_caps = gst_pad_get_allowed_caps (tee_sink);
    GST_TRACE_OBJECT (tree_bin, "Allowed caps are: %" GST_PTR_FORMAT,
        current_caps);
  } else {
    /* NOTE(review): this extra ref assumes kms_tree_bin_get_input_caps()
     * returns a borrowed reference — confirm its transfer annotation,
     * otherwise this leaks one ref per call */
    gst_caps_ref (current_caps);
    GST_TRACE_OBJECT (tree_bin, "Current caps are: %" GST_PTR_FORMAT,
        current_caps);
  }

  if (current_caps != NULL) {
    if (gst_caps_can_intersect (caps, current_caps)) {
      ret = TRUE;
    }
    gst_caps_unref (current_caps);
  }

  g_object_unref (tee_sink);

  return ret;
}
/* check downstream caps to configure format and alignment */
static void
gst_h264_parse_negotiate (GstH264Parse * h264parse)
{
  GstCaps *allowed;
  guint format = GST_H264_PARSE_FORMAT_NONE;
  guint align = GST_H264_PARSE_ALIGN_NONE;

  allowed = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
  GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, allowed);

  /* helper tolerates NULL caps and leaves format/align untouched then */
  gst_h264_parse_format_from_caps (allowed, &format, &align);

  if (allowed != NULL)
    gst_caps_unref (allowed);

  /* default */
  if (!format)
    format = GST_H264_PARSE_FORMAT_BYTE;

  if (!align)
    align = GST_H264_PARSE_ALIGN_AU;

  GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
      gst_h264_parse_get_string (h264parse, TRUE, format),
      gst_h264_parse_get_string (h264parse, FALSE, align));

  h264parse->format = format;
  h264parse->align = align;
}
/* Builds the output caps for the H.265 encoder, selecting "hvc1" vs
 * "byte-stream" stream-format based on what downstream allows.  Also sets
 * base_encode->need_codec_data accordingly. */
static GstCaps *
gst_vaapiencode_h265_get_caps (GstVaapiEncode * base_encode)
{
  GstVaapiEncodeH265 *const encode = GST_VAAPIENCODE_H265_CAST (base_encode);
  GstCaps *caps, *allowed_caps;

  caps = gst_caps_from_string (GST_CODEC_CAPS);

  /* Check whether "stream-format" is hvcC mode */
  allowed_caps =
      gst_pad_get_allowed_caps (GST_VAAPI_PLUGIN_BASE_SRC_PAD (encode));
  if (allowed_caps) {
    const char *stream_format = NULL;
    GstStructure *structure;
    guint i, num_structures;

    /* scan structures until one carries a string "stream-format" field */
    num_structures = gst_caps_get_size (allowed_caps);
    for (i = 0; !stream_format && i < num_structures; i++) {
      structure = gst_caps_get_structure (allowed_caps, i);
      if (!gst_structure_has_field_typed (structure, "stream-format",
              G_TYPE_STRING))
        continue;
      stream_format = gst_structure_get_string (structure, "stream-format");
    }
    encode->is_hvc = stream_format && strcmp (stream_format, "hvc1") == 0;
    gst_caps_unref (allowed_caps);
  }
  gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING,
      encode->is_hvc ? "hvc1" : "byte-stream", NULL);
  /* hvc1 output requires codec_data to be produced */
  base_encode->need_codec_data = encode->is_hvc;

  /* XXX: update profile and level information */
  return caps;
}
/* Negotiates the decoder's output video format from the downstream-allowed
 * caps, defaulting to NV12, then sets the output state and chains up.
 *
 * Fix: gst_pad_get_allowed_caps() returns NULL when the src pad is not
 * linked; the old code passed NULL to gst_caps_truncate().  Fall back to
 * the NV12 default in that case. */
static gboolean
gst_mfc_dec_negotiate (GstVideoDecoder * decoder)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstVideoCodecState *state;
  GstCaps *allowed_caps;
  GstVideoFormat format = GST_VIDEO_FORMAT_NV12;

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_DECODER_SRC_PAD (self));
  if (allowed_caps != NULL) {
    allowed_caps = gst_caps_truncate (allowed_caps);
    allowed_caps = gst_caps_fixate (allowed_caps);
    if (!gst_caps_is_empty (allowed_caps)) {
      const gchar *format_str;
      GstStructure *s = gst_caps_get_structure (allowed_caps, 0);

      format_str = gst_structure_get_string (s, "format");
      if (format_str)
        format = gst_video_format_from_string (format_str);
    }
    gst_caps_unref (allowed_caps);
  }

  self->format = format;
  state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
      self->format, self->crop_width, self->crop_height, self->input_state);
  gst_video_codec_state_unref (state);

  return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
/* Returns the caps currently allowed on the recorder's audio or video
 * target pad, or NULL for unknown pad types / unset targets.  The caller
 * owns the returned caps. */
static GstCaps *
kms_recorder_endpoint_allowed_caps (KmsElement * kmselement,
    KmsElementPadType type)
{
  KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (kmselement);
  GstPad *target_pad = NULL;

  switch (type) {
    case KMS_ELEMENT_PAD_TYPE_VIDEO:
      target_pad = self->priv->video_target;
      break;
    case KMS_ELEMENT_PAD_TYPE_AUDIO:
      target_pad = self->priv->audio_target;
      break;
    default:
      break;
  }

  if (target_pad == NULL)
    return NULL;

  return gst_pad_get_allowed_caps (target_pad);
}
/* Returns the caps acceptable on the sink pad: the downstream-allowed caps
 * with the "parsed" field stripped (the parser produces parsed output from
 * unparsed input), intersected with the sink template; falls back to the
 * template caps when there is no peer. */
static GstCaps *
gst_h263_parse_get_sink_caps (GstBaseParse * parse)
{
  GstCaps *peercaps;
  GstCaps *res;

  peercaps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (parse));
  if (peercaps) {
    guint i, n;

    /* Remove parsed field */
    peercaps = gst_caps_make_writable (peercaps);
    n = gst_caps_get_size (peercaps);
    for (i = 0; i < n; i++) {
      GstStructure *s = gst_caps_get_structure (peercaps, i);

      gst_structure_remove_field (s, "parsed");
    }

    /* NOTE(review): gst_pad_get_pad_template_caps() returns a new ref in
     * GStreamer 1.0 (which would be leaked here) but a borrowed pointer in
     * 0.10 — confirm which API generation this file targets */
    res =
        gst_caps_intersect_full (peercaps,
        gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse)),
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
  } else {
    res =
        gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD
            (parse)));
  }

  return res;
}
/* Reads the downstream-allowed caps to configure the target H.264 profile
 * and level; both remain 0 ("auto") when downstream does not constrain
 * them.
 *
 * Fix: when downstream has ANY caps, gst_pad_get_allowed_caps() returns a
 * reference to the same caps object as the template caps; that reference
 * was never released, leaking one ref per negotiation. */
static gboolean
gst_msdkh264enc_set_format (GstMsdkEnc * encoder)
{
  GstMsdkH264Enc *thiz = GST_MSDKH264ENC (encoder);
  GstCaps *template_caps;
  GstCaps *allowed_caps = NULL;

  thiz->profile = 0;
  thiz->level = 0;

  template_caps = gst_static_pad_template_get_caps (&src_factory);
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  /* If downstream has ANY caps let encoder decide profile and level */
  if (allowed_caps == template_caps) {
    GST_INFO_OBJECT (thiz,
        "downstream has ANY caps, profile/level set to auto");
    /* release our own ref to the shared caps object (was leaked) */
    gst_caps_unref (allowed_caps);
  } else if (allowed_caps) {
    GstStructure *s;
    const gchar *profile;
    const gchar *level;

    if (gst_caps_is_empty (allowed_caps)) {
      gst_caps_unref (allowed_caps);
      gst_caps_unref (template_caps);
      return FALSE;
    }

    allowed_caps = gst_caps_make_writable (allowed_caps);
    allowed_caps = gst_caps_fixate (allowed_caps);
    s = gst_caps_get_structure (allowed_caps, 0);

    profile = gst_structure_get_string (s, "profile");
    if (profile) {
      if (!strcmp (profile, "high")) {
        thiz->profile = MFX_PROFILE_AVC_HIGH;
      } else if (!strcmp (profile, "main")) {
        thiz->profile = MFX_PROFILE_AVC_MAIN;
      } else if (!strcmp (profile, "baseline")) {
        thiz->profile = MFX_PROFILE_AVC_BASELINE;
      } else if (!strcmp (profile, "constrained-baseline")) {
        thiz->profile = MFX_PROFILE_AVC_CONSTRAINED_BASELINE;
      } else {
        /* caps negotiation guarantees one of the template profiles */
        g_assert_not_reached ();
      }
    }

    level = gst_structure_get_string (s, "level");
    if (level) {
      thiz->level = gst_codec_utils_h264_get_level_idc (level);
    }

    gst_caps_unref (allowed_caps);
  }

  gst_caps_unref (template_caps);

  return TRUE;
}
/* setcaps on the postprocessor sink pad: intersects the converted output
 * caps with what downstream allows, doubles the framerate for interlaced
 * input, and sets the result on the source pad.
 *
 * Fixes: output_caps and allowed_caps were leaked when the intersection
 * was empty; src_caps was leaked on the success path (gst_pad_set_caps()
 * does not take ownership); removed a dead store to `structure`. */
static gboolean
gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstStructure *structure;
  GstCaps *output_caps, *allowed_caps, *src_caps;
  gboolean res;

  /* extract interlaced flag */
  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);

  allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);

  output_caps = gst_vdp_video_to_output_caps (caps);
  src_caps = gst_caps_intersect (output_caps, allowed_caps);
  gst_caps_truncate (src_caps);

  if (gst_caps_is_empty (src_caps)) {
    /* previously leaked output_caps and allowed_caps on this path */
    gst_caps_unref (output_caps);
    gst_caps_unref (allowed_caps);
    gst_caps_unref (src_caps);
    goto invalid_caps;
  }

  GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT
      " src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps);

  gst_caps_unref (output_caps);
  gst_caps_unref (allowed_caps);

  if (gst_vdp_vpp_is_interlaced (vpp)) {
    gint fps_n, fps_d;

    structure = gst_caps_get_structure (src_caps, 0);

    if (!gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
      gst_caps_unref (src_caps);
      goto invalid_caps;
    }

    /* deinterlacing doubles the output frame rate */
    gst_fraction_double (&fps_n, &fps_d);
    gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
        NULL);
    gst_structure_remove_field (structure, "interlaced");

    vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
  }

  res = gst_pad_set_caps (vpp->srcpad, src_caps);
  gst_caps_unref (src_caps);    /* was leaked before */

done:
  gst_object_unref (vpp);

  return res;

invalid_caps:
  GST_ERROR_OBJECT (vpp, "invalid caps: %" GST_PTR_FORMAT, caps);
  res = FALSE;
  goto done;
}
/* Recomputes the caps allowed through this element as the intersection of
 * what both pads' peers allow, stores the result in negotiation->caps and
 * notifies the "allowed-caps" property. */
static void
gst_negotiation_update_caps (GstNegotiation * negotiation)
{
  GstCaps *srccaps;
  GstCaps *sinkcaps;
  GstCaps *icaps;

  srccaps = gst_pad_get_allowed_caps (negotiation->srcpad);
  sinkcaps = gst_pad_get_allowed_caps (negotiation->sinkpad);

  icaps = gst_caps_intersect (srccaps, sinkcaps);
  /* gst_caps_free() marks this as a pre-0.10 API generation */
  gst_caps_free (srccaps);
  gst_caps_free (sinkcaps);

  /* NOTE(review): whether icaps must be released after gst_caps_replace()
   * depends on this API generation's ownership rules — confirm it is not
   * leaked here */
  gst_caps_replace (&negotiation->caps, icaps);
  g_object_notify (G_OBJECT (negotiation), "allowed-caps");

  GST_DEBUG ("notify %" GST_PTR_FORMAT, icaps);
}
/* Buffer-allocation handler for the sink pad (GStreamer 0.10 pad_alloc
 * flow): hands upstream a GstVdpVideoBuffer matching @caps.
 *
 * When no VdpDevice is known yet, a throw-away buffer is allocated from
 * downstream purely to discover the device it lives on.
 *
 * Returns: GST_FLOW_OK with *buf set, or GST_FLOW_ERROR. */
static GstFlowReturn
gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buf)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstVdpOutputBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstVdpDevice *device = NULL;
  GstStructure *structure;
  gint width, height;
  gint chroma_type;

  if (!vpp->device) {
    /* if we haven't got a device yet we must alloc a buffer downstream to get it */
    GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad);

    gst_pad_fixate_caps (vpp->srcpad, src_caps);
    ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0,
        src_caps, (GstBuffer **) & outbuf);

    gst_caps_unref (src_caps);
    if (ret != GST_FLOW_OK)
      goto error;

    /* keep only the device; the probe buffer itself is discarded */
    device = outbuf->device;
    gst_buffer_unref (GST_BUFFER (outbuf));
  } else
    device = vpp->device;

  structure = gst_caps_get_structure (caps, 0);
  /* all three fields are required to size the video buffer */
  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height) ||
      !gst_structure_get_int (structure, "chroma-type", &chroma_type))
    goto error;

  *buf = GST_BUFFER (gst_vdp_video_buffer_new (device,
          chroma_type, width, height));

  if (*buf == NULL)
    goto error;

  GST_BUFFER_SIZE (*buf) = size;
  GST_BUFFER_OFFSET (*buf) = offset;

  gst_buffer_set_caps (*buf, caps);

  ret = GST_FLOW_OK;

error:
  gst_object_unref (vpp);

  return ret;
}
/* Starts an image capture, renegotiating capture caps first if requested.
 *
 * Fix: gst_pad_get_allowed_caps() may return NULL (unlinked imgsrc pad);
 * gst_caps_replace() handles NULL but the unconditional gst_caps_unref()
 * afterwards did not.
 *
 * Returns: TRUE if the capture was started (or handed to GstPhotography). */
static gboolean
start_image_capture (GstWrapperCameraBinSrc * self)
{
  GstBaseCameraSrc *bcamsrc = GST_BASE_CAMERA_SRC (self);
  GstPhotography *photography =
      (GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
      GST_TYPE_PHOTOGRAPHY);
  gboolean ret = FALSE;
  GstCaps *caps;

  GST_DEBUG_OBJECT (self, "Starting image capture");

  gst_element_set_state (self->src_vid_src, GST_STATE_READY);

  if (self->image_renegotiate) {
    /* clean capsfilter caps so they don't interfere here */
    g_object_set (self->src_filter, "caps", NULL, NULL);
    if (self->src_zoom_filter)
      g_object_set (self->src_zoom_filter, "caps", NULL, NULL);

    caps = gst_pad_get_allowed_caps (self->imgsrc);

    gst_caps_replace (&self->image_capture_caps, caps);
    if (caps)
      gst_caps_unref (caps);    /* guard: caps may be NULL */

    /* FIXME - do we need to update basecamerasrc width/height somehow here?
     * if not, i think we need to do something about _when_ they get updated
     * to be sure that set_element_zoom doesn't use the wrong values */

    /* We caught this event in the src pad event handler and now we want to
     * actually push it upstream */
    gst_pad_send_event (self->outsel_imgpad, gst_event_new_reconfigure ());
    self->image_renegotiate = FALSE;
  }

  if (photography) {
    gst_element_set_state (self->src_vid_src, GST_STATE_PLAYING);
    GST_DEBUG_OBJECT (self, "prepare image capture caps %" GST_PTR_FORMAT,
        self->image_capture_caps);
    ret = gst_photography_prepare_for_capture (photography,
        (GstPhotographyCapturePrepared) img_capture_prepared,
        self->image_capture_caps, self);
  } else {
    /* drop the capture lock while reconfiguring the video source */
    g_mutex_unlock (&bcamsrc->capturing_mutex);
    gst_wrapper_camera_bin_reset_video_src_caps (self,
        self->image_capture_caps);
    g_mutex_lock (&bcamsrc->capturing_mutex);
    ret = TRUE;
    gst_element_set_state (self->src_vid_src, GST_STATE_PLAYING);
  }

  return ret;
}
/* Output buffer preparation... if the buffer has no caps, and
 * our allowed output caps is fixed, then give the caps to the
 * buffer.
 * This ensures that outgoing buffers have caps if we can, so
 * that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw-int,width=16,depth=16,rate=48000,channels=2,
 *       endianness=4321,signed='(boolean)'true ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    gint size, GstCaps * caps, GstBuffer ** buf)
{
  if (GST_BUFFER_CAPS (input) != NULL) {
    /* Output buffer already has caps */
    GST_DEBUG_OBJECT (trans,
        "Input buffer already has caps (implicitely fixed)");
    /* FIXME : Move this behaviour to basetransform. The given caps are the ones
     * of the source pad, therefore our outgoing buffers should always have
     * those caps. */
    gst_buffer_set_caps (input, caps);
    gst_buffer_ref (input);
    *buf = input;
  } else {
    /* Buffer has no caps. See if the output pad only supports fixed caps */
    GstCaps *out_caps;

    /* prefer already-negotiated pad caps; otherwise query what the peer
     * allows (must be linked for pad_alloc to have reached us) */
    out_caps = GST_PAD_CAPS (trans->srcpad);

    if (out_caps != NULL) {
      gst_caps_ref (out_caps);
    } else {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_make_writable (out_caps);
    gst_caps_do_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to buffer with no caps", out_caps);

      /* reuse the input buffer when its metadata can be written; otherwise
       * wrap it in a sub-buffer whose metadata we own */
      if (gst_buffer_is_metadata_writable (input)) {
        gst_buffer_ref (input);
        *buf = input;
      } else {
        GST_DEBUG_OBJECT (trans, "Creating sub-buffer and setting caps");
        *buf = gst_buffer_create_sub (input, 0, GST_BUFFER_SIZE (input));
      }

      /* direct assignment transfers our out_caps ref to the buffer */
      GST_BUFFER_CAPS (*buf) = out_caps;

      if (GST_PAD_CAPS (trans->srcpad) == NULL)
        gst_pad_set_caps (trans->srcpad, out_caps);
    } else {
      GST_DEBUG_OBJECT (trans, "Have unfixed output caps %" GST_PTR_FORMAT,
          out_caps);
      gst_caps_unref (out_caps);
    }
  }

  return GST_FLOW_OK;
}
/* Reads the downstream-allowed caps to configure the target VP8 profile;
 * it remains 0 ("auto") when downstream does not constrain it.
 *
 * Fix: when downstream has ANY caps, gst_pad_get_allowed_caps() returns a
 * reference to the same caps object as the template caps; that reference
 * was never released, leaking one ref per negotiation. */
static gboolean
gst_msdkvp8enc_set_format (GstMsdkEnc * encoder)
{
  GstMsdkVP8Enc *thiz = GST_MSDKVP8ENC (encoder);
  GstCaps *template_caps;
  GstCaps *allowed_caps = NULL;

  thiz->profile = 0;

  template_caps = gst_static_pad_template_get_caps (&src_factory);
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  /* If downstream has ANY caps let encoder decide profile and level */
  if (allowed_caps == template_caps) {
    GST_INFO_OBJECT (thiz,
        "downstream has ANY caps, profile/level set to auto");
    /* release our own ref to the shared caps object (was leaked) */
    gst_caps_unref (allowed_caps);
  } else if (allowed_caps) {
    GstStructure *s;
    const gchar *profile;

    if (gst_caps_is_empty (allowed_caps)) {
      gst_caps_unref (allowed_caps);
      gst_caps_unref (template_caps);
      return FALSE;
    }

    allowed_caps = gst_caps_make_writable (allowed_caps);
    allowed_caps = gst_caps_fixate (allowed_caps);
    s = gst_caps_get_structure (allowed_caps, 0);

    profile = gst_structure_get_string (s, "profile");
    if (profile) {
      if (!strcmp (profile, "3")) {
        thiz->profile = MFX_PROFILE_VP8_3;
      } else if (!strcmp (profile, "2")) {
        thiz->profile = MFX_PROFILE_VP8_2;
      } else if (!strcmp (profile, "1")) {
        thiz->profile = MFX_PROFILE_VP8_1;
      } else if (!strcmp (profile, "0")) {
        thiz->profile = MFX_PROFILE_VP8_0;
      } else {
        /* caps negotiation guarantees one of the template profiles */
        g_assert_not_reached ();
      }
    }

    gst_caps_unref (allowed_caps);
  }

  gst_caps_unref (template_caps);

  return TRUE;
}
/* check downstream caps to configure format and alignment */
static void
gst_h264_parse_negotiate (GstH264Parse * h264parse)
{
  GstCaps *allowed;
  guint format = GST_H264_PARSE_FORMAT_NONE;
  guint align = GST_H264_PARSE_ALIGN_NONE;

  allowed = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
  GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, allowed);

  if (allowed != NULL && gst_caps_get_size (allowed) > 0) {
    GstStructure *structure = gst_caps_get_structure (allowed, 0);
    const gchar *value;

    /* map the peer's stream-format preference, if any */
    value = gst_structure_get_string (structure, "stream-format");
    if (value != NULL) {
      if (strcmp (value, "avc") == 0)
        format = GST_H264_PARSE_FORMAT_AVC;
      else if (strcmp (value, "byte-stream") == 0)
        format = GST_H264_PARSE_FORMAT_BYTE;
      else
        GST_DEBUG_OBJECT (h264parse, "unknown stream-format: %s", value);
    }

    /* map the peer's alignment preference, if any */
    value = gst_structure_get_string (structure, "alignment");
    if (value != NULL) {
      if (strcmp (value, "au") == 0)
        align = GST_H264_PARSE_ALIGN_AU;
      else if (strcmp (value, "nal") == 0)
        align = GST_H264_PARSE_ALIGN_NAL;
      else
        GST_DEBUG_OBJECT (h264parse, "unknown alignment: %s", value);
    }
  }

  if (allowed != NULL)
    gst_caps_unref (allowed);

  /* default */
  if (!format)
    format = GST_H264_PARSE_FORMAT_BYTE;

  if (!align)
    align = GST_H264_PARSE_ALIGN_AU;

  GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
      gst_h264_parse_get_string (h264parse, TRUE, format),
      gst_h264_parse_get_string (h264parse, FALSE, align));

  h264parse->format = format;
  h264parse->align = align;
}
/* setcaps handler for the OMX audio element's sink pad: pushes the sample
 * rate into the OMX PCM parameters and fixates matching caps on the source
 * pad before accepting the sink caps. */
static gboolean
sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  GstOmxBaseFilter *omx_base;
  GOmxCore *gomx;
  gint rate = 0;

  omx_base = GST_OMX_BASE_FILTER (GST_PAD_PARENT (pad));
  gomx = (GOmxCore *) omx_base->gomx;

  GST_INFO_OBJECT (omx_base, "setcaps (sink): %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  gst_structure_get_int (structure, "rate", &rate);

  /* Input port configuration. */
  {
    OMX_AUDIO_PARAM_PCMMODETYPE param;

    G_OMX_INIT_PARAM (param);

    /* NOTE(review): this uses out_port->port_index although the comment
     * above says input port — confirm which port is intended */
    param.nPortIndex = omx_base->out_port->port_index;
    OMX_GetParameter (gomx->omx_handle, OMX_IndexParamAudioPcm, &param);

    param.nSamplingRate = rate;

    OMX_SetParameter (gomx->omx_handle, OMX_IndexParamAudioPcm, &param);
  }

  /* set caps on the srcpad */
  {
    GstCaps *tmp_caps;

    /* NOTE(review): gst_pad_get_allowed_caps() can return NULL when the
     * src pad is unlinked; gst_caps_make_writable() would then be called
     * on NULL — confirm the pad is always linked at this point */
    tmp_caps = gst_pad_get_allowed_caps (omx_base->srcpad);
    tmp_caps = gst_caps_make_writable (tmp_caps);
    gst_caps_truncate (tmp_caps);

    gst_pad_fixate_caps (omx_base->srcpad, tmp_caps);

    if (gst_caps_is_fixed (tmp_caps)) {
      GST_INFO_OBJECT (omx_base, "fixated to: %" GST_PTR_FORMAT, tmp_caps);
      gst_pad_set_caps (omx_base->srcpad, tmp_caps);
    }

    gst_caps_unref (tmp_caps);
  }

  return gst_pad_set_caps (pad, caps);
}
/* getcaps for the L16 payloader sink pad: proxies channel count and sample
 * rate from the downstream peer, deriving them from the static RTP payload
 * number when the explicit fields are absent. */
static GstCaps *
gst_rtp_L16_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
    GstCaps * filter)
{
  GstCaps *caps;
  GstCaps *peercaps;

  caps = gst_pad_get_pad_template_caps (pad);
  peercaps = gst_pad_get_allowed_caps (rtppayload->srcpad);

  if (peercaps != NULL) {
    if (!gst_caps_is_empty (peercaps)) {
      GstStructure *peer_s = gst_caps_get_structure (peercaps, 0);
      gint channels, rate, pt;

      caps = gst_caps_make_writable (caps);

      /* channels: explicit field wins, otherwise derive from payload type */
      if (gst_structure_get_int (peer_s, "channels", &channels)) {
        gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
      } else if (gst_structure_get_int (peer_s, "payload", &pt)) {
        if (pt == GST_RTP_PAYLOAD_L16_STEREO)
          gst_caps_set_simple (caps, "channels", G_TYPE_INT, 2, NULL);
        else if (pt == GST_RTP_PAYLOAD_L16_MONO)
          gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL);
      }

      /* rate: explicit clock-rate wins, otherwise 44100 for the static pts */
      if (gst_structure_get_int (peer_s, "clock-rate", &rate)) {
        gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
      } else if (gst_structure_get_int (peer_s, "payload", &pt)) {
        if (pt == GST_RTP_PAYLOAD_L16_STEREO || pt == GST_RTP_PAYLOAD_L16_MONO)
          gst_caps_set_simple (caps, "rate", G_TYPE_INT, 44100, NULL);
      }
    }
    gst_caps_unref (peercaps);
  }

  if (filter != NULL) {
    GstCaps *unfiltered = caps;

    caps = gst_caps_intersect_full (filter, unfiltered,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (unfiltered);
  }

  return caps;
}
/* Configures the CELT encoder for the negotiated audio format, picking up
 * a frame-size preference from downstream caps and feeding latency/frame
 * constraints back to the base class. */
static gboolean
gst_celt_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
{
  GstCeltEnc *enc = GST_CELT_ENC (benc);
  GstCaps *peer_caps;

  enc->channels = GST_AUDIO_INFO_CHANNELS (info);
  enc->rate = GST_AUDIO_INFO_RATE (info);

  /* handle reconfigure: tear down previously created codec objects */
  if (enc->state != NULL) {
    celt_encoder_destroy (enc->state);
    enc->state = NULL;
  }
  if (enc->mode != NULL) {
    celt_mode_destroy (enc->mode);
    enc->mode = NULL;
  }
  memset (&enc->header, 0, sizeof (enc->header));

  /* pick up a frame-size preference from downstream, if any */
  peer_caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (enc));
  if (peer_caps != NULL) {
    if (!gst_caps_is_empty (peer_caps)) {
      GstStructure *peer_structure = gst_caps_get_structure (peer_caps, 0);

      gst_structure_get_int (peer_structure, "frame-size", &enc->frame_size);
    }
    gst_caps_unref (peer_caps);
  }

  /* an explicit property setting overrides the downstream preference */
  if (enc->requested_frame_size > 0)
    enc->frame_size = enc->requested_frame_size;

  GST_DEBUG_OBJECT (enc, "channels=%d rate=%d frame-size=%d",
      enc->channels, enc->rate, enc->frame_size);

  if (!gst_celt_enc_setup (enc))
    return FALSE;

  /* feedback to base class */
  gst_audio_encoder_set_latency (benc,
      gst_celt_enc_get_latency (enc), gst_celt_enc_get_latency (enc));
  gst_audio_encoder_set_frame_samples_min (benc, enc->frame_size);
  gst_audio_encoder_set_frame_samples_max (benc, enc->frame_size);
  gst_audio_encoder_set_frame_max (benc, 1);

  return TRUE;
}
/* Sets the source pad caps from the decoder's video state (dimensions,
 * PAR, interlacing, framerate) constrained by what downstream allows.
 * No-op when caps have already been set.
 *
 * Returns: TRUE on success; FALSE when downstream allows nothing. */
gboolean
gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
{
  GstCaps *caps;
  GstVideoState *state = &base_video_decoder->state;

  if (base_video_decoder->have_src_caps)
    return TRUE;

  caps = gst_pad_get_allowed_caps (base_video_decoder->srcpad);
  if (!caps)
    goto null_allowed_caps;
  if (gst_caps_is_empty (caps))
    goto empty_allowed_caps;

  /* overlay the decoder state onto whatever downstream allows */
  gst_caps_set_simple (caps,
      "width", G_TYPE_INT, state->width,
      "height", G_TYPE_INT, state->height,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
      state->par_d, "interlaced", G_TYPE_BOOLEAN, state->interlaced, NULL);
  /* a zero denominator marks an unknown framerate; omit the field then */
  if (state->fps_d != 0)
    gst_caps_set_simple (caps, "framerate",
        GST_TYPE_FRACTION, state->fps_n, state->fps_d, NULL);

  gst_pad_fixate_caps (base_video_decoder->srcpad, caps);

  GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps);

  base_video_decoder->have_src_caps =
      gst_pad_set_caps (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      caps);

  gst_caps_unref (caps);

  return base_video_decoder->have_src_caps;

null_allowed_caps:
  GST_ERROR_OBJECT (base_video_decoder,
      "Got null from gst_pad_get_allowed_caps");
  return FALSE;

empty_allowed_caps:
  GST_ERROR_OBJECT (base_video_decoder,
      "Got EMPTY caps from gst_pad_get_allowed_caps");

  gst_caps_unref (caps);

  return FALSE;
}
/* getcaps for the jpegenc sink pad: proxies width/height/framerate from the
 * downstream peer into every template structure.  The caller owns the
 * returned caps. */
static GstCaps *
gst_jpegenc_getcaps (GstPad * pad)
{
  GstJpegEnc *jpegenc = GST_JPEGENC (gst_pad_get_parent (pad));
  GstCaps *caps, *othercaps;
  const GstCaps *templ;
  gint i, j;
  GstStructure *structure = NULL;

  /* we want to proxy properties like width, height and framerate from the
     other end of the element */
  othercaps = gst_pad_get_allowed_caps (jpegenc->srcpad);
  if (othercaps == NULL ||
      gst_caps_is_empty (othercaps) || gst_caps_is_any (othercaps)) {
    /* no usable peer constraints: just copy the template caps */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
    goto done;
  }

  caps = gst_caps_new_empty ();
  templ = gst_pad_get_pad_template_caps (pad);

  /* cross-product of template structures with peer structures, copying the
   * proxied fields from each peer structure */
  for (i = 0; i < gst_caps_get_size (templ); i++) {
    /* pick fields from peer caps */
    for (j = 0; j < gst_caps_get_size (othercaps); j++) {
      GstStructure *s = gst_caps_get_structure (othercaps, j);
      const GValue *val;

      structure = gst_structure_copy (gst_caps_get_structure (templ, i));
      if ((val = gst_structure_get_value (s, "width")))
        gst_structure_set_value (structure, "width", val);
      if ((val = gst_structure_get_value (s, "height")))
        gst_structure_set_value (structure, "height", val);
      if ((val = gst_structure_get_value (s, "framerate")))
        gst_structure_set_value (structure, "framerate", val);

      /* merge_structure takes ownership of the copied structure */
      gst_caps_merge_structure (caps, structure);
    }
  }

done:
  /* drops the allowed-caps ref; handles the NULL case */
  gst_caps_replace (&othercaps, NULL);
  gst_object_unref (jpegenc);

  return caps;
}