static GstCaps * gst_v4l2sink_get_caps (GstBaseSink * bsink, GstCaps * filter) { GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink); GstCaps *ret; GSList *walk; GSList *formats; if (!GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) { /* FIXME: copy? */ GST_DEBUG_OBJECT (v4l2sink, "device is not open"); return gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD (v4l2sink)); } if (v4l2sink->probed_caps == NULL) { formats = gst_v4l2_object_get_format_list (v4l2sink->v4l2object); ret = gst_caps_new_empty (); for (walk = formats; walk; walk = walk->next) { struct v4l2_fmtdesc *format; GstStructure *template;
/* Negotiate output caps for the DTMF source.
 *
 * Takes the caps allowed by downstream (falling back to the src pad
 * template caps when there is no peer), rejects an empty set, fixates the
 * "rate" field as close as possible to DEFAULT_SAMPLE_RATE, stores the
 * resulting rate in dtmfsrc->sample_rate and applies the caps to the pad.
 *
 * Returns FALSE when no usable caps or rate can be agreed on.
 */
static gboolean
gst_dtmf_src_negotiate (GstBaseSrc * basesrc)
{
  GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (basesrc);
  GstPad *srcpad = GST_BASE_SRC_PAD (basesrc);
  GstCaps *negotiated;
  GstStructure *structure;
  gboolean res;

  negotiated = gst_pad_get_allowed_caps (srcpad);
  if (negotiated == NULL)
    negotiated = gst_pad_get_pad_template_caps (srcpad);

  if (gst_caps_is_empty (negotiated)) {
    gst_caps_unref (negotiated);
    return FALSE;
  }

  /* keep only the first structure and make it ours to modify */
  negotiated = gst_caps_truncate (negotiated);
  negotiated = gst_caps_make_writable (negotiated);

  structure = gst_caps_get_structure (negotiated, 0);
  gst_structure_fixate_field_nearest_int (structure, "rate",
      DEFAULT_SAMPLE_RATE);

  if (!gst_structure_get_int (structure, "rate", &dtmfsrc->sample_rate)) {
    GST_ERROR_OBJECT (dtmfsrc, "Could not get rate");
    gst_caps_unref (negotiated);
    return FALSE;
  }

  res = gst_pad_set_caps (srcpad, negotiated);
  gst_caps_unref (negotiated);

  return res;
}
/* Sink getcaps for the CELT RTP payloader (GStreamer 0.10 API).
 *
 * Starts from a copy of the sink pad template caps and, when downstream RTP
 * caps are available and non-empty, mirrors their first structure's
 * "clock-rate", "frame-size" and "encoding-params" fields back into the
 * audio caps as "rate", "frame-size" and "channels" so upstream encoders
 * produce a matching stream.  Returns a new caps ref owned by the caller.
 *
 * NOTE(review): "frame-size" and "encoding-params" are parsed with atoi();
 * malformed strings silently yield 0 (frame-size then left unset, channels
 * fixated near 0) — confirm this is the intended fallback. */
static GstCaps * gst_rtp_celt_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad) { GstCaps *otherpadcaps; GstCaps *caps; const gchar *params; otherpadcaps = gst_pad_get_allowed_caps (payload->srcpad); caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); if (otherpadcaps) { if (!gst_caps_is_empty (otherpadcaps)) { GstStructure *ps = gst_caps_get_structure (otherpadcaps, 0); GstStructure *s = gst_caps_get_structure (caps, 0); gint clock_rate = 0, frame_size = 0, channels = 1; if (gst_structure_get_int (ps, "clock-rate", &clock_rate)) { gst_structure_fixate_field_nearest_int (s, "rate", clock_rate); } if ((params = gst_structure_get_string (ps, "frame-size"))) frame_size = atoi (params); if (frame_size) gst_structure_set (s, "frame-size", G_TYPE_INT, frame_size, NULL); if ((params = gst_structure_get_string (ps, "encoding-params"))) channels = atoi (params); gst_structure_fixate_field_nearest_int (s, "channels", channels); GST_DEBUG_OBJECT (payload, "clock-rate=%d frame-size=%d channels=%d", clock_rate, frame_size, channels); } gst_caps_unref (otherpadcaps); } return caps; }
/* return a list of caps where we only need to set * width and height to get fixed caps */
/* Unit-test helper: for every structure in the videocrop src pad template
 * caps, build a single-structure GstCaps with a fixed FRAMERATE and with
 * "width"/"height" removed, asserting each result is otherwise fixed.
 * Returns a GList of GstCaps (newest first); caller owns list and caps.
 *
 * NOTE(review): allowed_caps from gst_pad_get_pad_template_caps() is
 * treated as borrowed (never unreffed) — correct under 0.10 semantics,
 * but a leak under 1.0 where that call returns a new reference; confirm
 * which GStreamer version this test targets. */
static GList * video_crop_get_test_caps (GstElement * videocrop) { const GstCaps *allowed_caps; GstPad *srcpad; GList *list = NULL; guint i; srcpad = gst_element_get_static_pad (videocrop, "src"); g_assert (srcpad != NULL); allowed_caps = gst_pad_get_pad_template_caps (srcpad); g_assert (allowed_caps != NULL); for (i = 0; i < gst_caps_get_size (allowed_caps); ++i) { GstStructure *new_structure; GstCaps *single_caps; single_caps = gst_caps_new_empty (); new_structure = gst_structure_copy (gst_caps_get_structure (allowed_caps, i)); gst_structure_set (new_structure, "framerate", GST_TYPE_FRACTION, FRAMERATE, 1, NULL); gst_structure_remove_field (new_structure, "width"); gst_structure_remove_field (new_structure, "height"); gst_caps_append_structure (single_caps, new_structure); /* should be fixed without width/height */ g_assert (gst_caps_is_fixed (single_caps)); list = g_list_prepend (list, single_caps); } gst_object_unref (srcpad); return list; }
/* Report the caps this GTK GL sink can accept: the sink pad template caps,
 * intersected with the caller-supplied filter when one is given, and then
 * extended with the GL overlay-compositor caps.  Caller owns the returned
 * reference. */
static GstCaps *
gst_gtk_gl_sink_get_caps (GstBaseSink * bsink, GstCaps * filter)
{
  GstCaps *caps;

  caps = gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD (bsink));

  if (filter != NULL) {
    GstCaps *filtered;

    GST_DEBUG_OBJECT (bsink, "intersecting with filter caps %" GST_PTR_FORMAT,
        filter);
    filtered = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = filtered;
  }

  /* advertise overlay-composition support on top of the raw caps */
  caps = gst_gl_overlay_compositor_add_caps (caps);

  GST_DEBUG_OBJECT (bsink, "returning caps: %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Sink getcaps for the Speex RTP payloader (GStreamer 0.10 API).
 *
 * Returns a copy of the sink pad template caps; when downstream RTP caps
 * are available and non-empty, their "clock-rate" is reflected back into
 * the audio "rate" field so the encoder matches the negotiated RTP clock.
 * Caller owns the returned reference. */
static GstCaps *
gst_rtp_speex_pay_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
{
  GstCaps *downstream_caps;
  GstCaps *result;

  downstream_caps = gst_pad_get_allowed_caps (payload->srcpad);
  result = gst_caps_copy (gst_pad_get_pad_template_caps (pad));

  if (downstream_caps != NULL) {
    if (!gst_caps_is_empty (downstream_caps)) {
      GstStructure *peer_s = gst_caps_get_structure (downstream_caps, 0);
      GstStructure *our_s = gst_caps_get_structure (result, 0);
      gint rate;

      /* mirror the negotiated RTP clock-rate into the audio rate */
      if (gst_structure_get_int (peer_s, "clock-rate", &rate))
        gst_structure_fixate_field_nearest_int (our_s, "rate", rate);
    }
    gst_caps_unref (downstream_caps);
  }

  return result;
}
/* getcaps for the droid camera viewfinder src pad.
 *
 * While holding the element's object lock: if camera parameters are known,
 * derive the viewfinder caps from them and stamp every structure with the
 * buffer pool's current "orientation-angle" (pool lock held while reading
 * it); otherwise fall back to a copy of the pad template caps.  Caller owns
 * the returned reference.
 *
 * NOTE(review): the loop compares signed `x` against unsigned `len`
 * (implicit conversion) and assumes src->pool is non-NULL whenever
 * camera_params is set — confirm that invariant holds at the call sites. */
static GstCaps * gst_droid_cam_src_vfsrc_getcaps (GstPad * pad) { GstDroidCamSrc *src = GST_DROID_CAM_SRC (GST_OBJECT_PARENT (pad)); GstCaps *caps = NULL; GST_DEBUG_OBJECT (src, "vfsrc getcaps"); GST_OBJECT_LOCK (src); if (src->camera_params) { int x; uint len; caps = camera_params_get_viewfinder_caps (src->camera_params); len = gst_caps_get_size (caps); GST_CAMERA_BUFFER_POOL_LOCK (src->pool); for (x = 0; x < len; x++) { GstStructure *s = gst_caps_get_structure (caps, x); gst_structure_set (s, "orientation-angle", G_TYPE_INT, src->pool->orientation, NULL); } GST_CAMERA_BUFFER_POOL_UNLOCK (src->pool); } else { caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); } GST_OBJECT_UNLOCK (src); GST_LOG_OBJECT (src, "returning %" GST_PTR_FORMAT, caps); return caps; }
/* Handle queries on the VA-API decoder src pad.
 *
 * CAPS queries are answered from the src pad template caps, intersected
 * with the query's filter when present (template ref released after the
 * intersection, result ref released after storing it in the query).
 * CONTEXT queries are delegated to gst_vaapi_handle_context_query();
 * everything else falls through to the parent class implementation.
 * Returns TRUE when the query was handled. */
static gboolean gst_vaapidecode_src_query (GstVideoDecoder * vdec, GstQuery * query) { gboolean ret = TRUE; GstElement *const element = GST_ELEMENT (vdec); switch (GST_QUERY_TYPE (query)) { case GST_QUERY_CAPS:{ GstCaps *caps, *filter = NULL; GstPad *pad = GST_VIDEO_DECODER_SRC_PAD (vdec); gst_query_parse_caps (query, &filter); caps = gst_pad_get_pad_template_caps (pad); if (filter) { GstCaps *tmp = caps; caps = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST); gst_caps_unref (tmp); } gst_query_set_caps_result (query, caps); gst_caps_unref (caps); break; } case GST_QUERY_CONTEXT:{ ret = gst_vaapi_handle_context_query (element, query); break; } default:{ ret = GST_VIDEO_DECODER_CLASS (parent_class)->src_query (vdec, query); break; } } return ret; }
/* Chain function (GStreamer 0.10 buffer API): convert an incoming RTP H.263
 * packet into Asterisk's framing and push it downstream.
 *
 * Validates the RTP buffer, then builds an output buffer consisting of the
 * Asterisk header (16-bit payload length with the RTP marker bit folded
 * into the MSB, plus a 32-bit timestamp delta in samples relative to the
 * previous packet) followed by a copy of the RTP payload.  The src pad
 * template caps are attached to the outgoing buffer and the input buffer
 * is unreffed in both the success and the bad-packet path.
 *
 * NOTE(review): on the very first packet, lastts is initialised to the
 * packet timestamp only when lastts is 0 — a stream whose first timestamp
 * is itself 0 takes the same path; confirm the delta semantics are
 * intended for that edge case. */
static GstFlowReturn gst_asteriskh263_chain (GstPad * pad, GstBuffer * buf) { GstAsteriskh263 *asteriskh263; GstBuffer *outbuf; GstFlowReturn ret; asteriskh263 = GST_ASTERISK_H263 (GST_OBJECT_PARENT (pad)); if (!gst_rtp_buffer_validate (buf)) goto bad_packet; { gint payload_len; guint8 *payload; gboolean M; guint32 timestamp; guint32 samples; guint16 asterisk_len; payload_len = gst_rtp_buffer_get_payload_len (buf); payload = gst_rtp_buffer_get_payload (buf); M = gst_rtp_buffer_get_marker (buf); timestamp = gst_rtp_buffer_get_timestamp (buf); outbuf = gst_buffer_new_and_alloc (payload_len + GST_ASTERISKH263_HEADER_LEN); /* build the asterisk header */ asterisk_len = payload_len; if (M) asterisk_len |= 0x8000; if (!asteriskh263->lastts) asteriskh263->lastts = timestamp; samples = timestamp - asteriskh263->lastts; asteriskh263->lastts = timestamp; GST_ASTERISKH263_HEADER_TIMESTAMP (outbuf) = g_htonl (samples); GST_ASTERISKH263_HEADER_LENGTH (outbuf) = g_htons (asterisk_len); /* copy the data into place */ memcpy (GST_BUFFER_DATA (outbuf) + GST_ASTERISKH263_HEADER_LEN, payload, payload_len); GST_BUFFER_TIMESTAMP (outbuf) = timestamp; gst_buffer_set_caps (outbuf, (GstCaps *) gst_pad_get_pad_template_caps (asteriskh263->srcpad)); ret = gst_pad_push (asteriskh263->srcpad, outbuf); gst_buffer_unref (buf); } return ret; bad_packet: { GST_DEBUG ("Packet does not validate"); gst_buffer_unref (buf); return GST_FLOW_ERROR; } }
static GstFlowReturn gst_rsvg_decode_image (GstRsvgDec * rsvg, const guint8 * data, guint size, GstBuffer ** buffer) { GstFlowReturn ret = GST_FLOW_OK; cairo_t *cr; cairo_surface_t *surface; RsvgHandle *handle; GError *error = NULL; RsvgDimensionData dimension; gdouble scalex, scaley; GST_LOG_OBJECT (rsvg, "parsing svg"); handle = rsvg_handle_new_from_data (data, size, &error); if (!handle) { GST_ERROR_OBJECT (rsvg, "Failed to parse SVG image: %s", error->message); g_error_free (error); return GST_FLOW_ERROR; } rsvg_handle_get_dimensions (handle, &dimension); if (rsvg->width != dimension.width || rsvg->height != dimension.height) { GstCaps *caps1, *caps2, *caps3; GstStructure *s; GST_LOG_OBJECT (rsvg, "resolution changed, updating caps"); caps1 = gst_caps_copy (gst_pad_get_pad_template_caps (rsvg->srcpad)); caps2 = gst_pad_peer_get_caps (rsvg->srcpad); if (caps2) { caps3 = gst_caps_intersect (caps1, caps2); gst_caps_unref (caps1); gst_caps_unref (caps2); caps1 = caps3; caps3 = NULL; } if (gst_caps_is_empty (caps1)) { GST_ERROR_OBJECT (rsvg, "Unable to negotiate a format"); gst_caps_unref (caps1); g_object_unref (handle); return GST_FLOW_NOT_NEGOTIATED; } caps2 = gst_caps_copy (gst_pad_get_pad_template_caps (rsvg->srcpad)); s = gst_caps_get_structure (caps2, 0); gst_structure_set (s, "width", G_TYPE_INT, dimension.width, "height", G_TYPE_INT, dimension.height, "framerate", GST_TYPE_FRACTION, 0, 1, NULL); caps3 = gst_caps_intersect (caps1, caps2); if (!gst_caps_is_empty (caps3)) { gst_caps_truncate (caps3); gst_pad_set_caps (rsvg->srcpad, caps3); gst_caps_unref (caps1); gst_caps_unref (caps2); gst_caps_unref (caps3); rsvg->width = dimension.width; rsvg->height = dimension.height; } else { gst_caps_unref (caps2); gst_caps_unref (caps3); gst_caps_truncate (caps1); s = gst_caps_get_structure (caps1, 0); gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL); if (!gst_caps_is_fixed (caps1) && (!gst_structure_fixate_field_nearest_int (s, "width", 
dimension.width) || !gst_structure_fixate_field_nearest_int (s, "height", dimension.height))) { g_object_unref (handle); GST_ERROR_OBJECT (rsvg, "Failed to fixate caps"); return GST_FLOW_NOT_NEGOTIATED; } gst_pad_set_caps (rsvg->srcpad, caps1); gst_structure_get_int (s, "width", &rsvg->width); gst_structure_get_int (s, "height", &rsvg->height); gst_caps_unref (caps1); } } if ((ret = gst_pad_alloc_buffer_and_set_caps (rsvg->srcpad, GST_BUFFER_OFFSET_NONE, rsvg->width * rsvg->height * 4, GST_PAD_CAPS (rsvg->srcpad), buffer)) != GST_FLOW_OK) { g_object_unref (handle); GST_ERROR_OBJECT (rsvg, "Buffer allocation failed %s", gst_flow_get_name (ret)); return ret; } GST_LOG_OBJECT (rsvg, "render image at %d x %d", rsvg->height, rsvg->width); surface = cairo_image_surface_create_for_data (GST_BUFFER_DATA (*buffer), CAIRO_FORMAT_ARGB32, rsvg->width, rsvg->height, rsvg->width * 4); cr = cairo_create (surface); cairo_set_operator (cr, CAIRO_OPERATOR_CLEAR); cairo_set_source_rgba (cr, 1.0, 1.0, 1.0, 0.0); cairo_paint (cr); cairo_set_operator (cr, CAIRO_OPERATOR_OVER); cairo_set_source_rgba (cr, 0.0, 0.0, 0.0, 1.0); scalex = scaley = 1.0; if (rsvg->width != dimension.width) { scalex = ((gdouble) rsvg->width) / ((gdouble) dimension.width); } if (rsvg->height != dimension.height) { scaley = ((gdouble) rsvg->height) / ((gdouble) dimension.height); } cairo_scale (cr, scalex, scaley); rsvg_handle_render_cairo (handle, cr); g_object_unref (handle); cairo_destroy (cr); cairo_surface_destroy (surface); /* Now unpremultiply Cairo's ARGB to match GStreamer's */ gst_rsvg_decode_unpremultiply (GST_BUFFER_DATA (*buffer), rsvg->width, rsvg->height); return ret; }
/* Answer a CAPS query on a recorder-endpoint ghost pad.
 *
 * Builds the result from: the agnostic audio/video caps for the pad type,
 * restricted to the pad template caps intersected with the caps allowed on
 * the ghost pad's target; then further filtered either by the attached
 * appsrc's downstream (encodebin) caps queried through the profile caps,
 * or — when no appsrc is attached or the peer query fails — by the profile
 * caps directly; and finally by the query's own filter.  All intermediate
 * caps refs are released; returns FALSE when the pad has no target or an
 * unknown type.
 *
 * NOTE(review): in the "no appsrc" branch GST_WARNING_OBJECT is invoked
 * with appsrc == NULL (GStreamer tolerates a NULL object but logs it as
 * (NULL)) — consider logging on `self` instead.  Also the warning string
 * "agnostic's'" contains a stray apostrophe. */
static gboolean kms_recorder_endpoint_query_caps (KmsElement * element, GstPad * pad, GstQuery * query) { KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (element); GstCaps *allowed = NULL, *caps = NULL; GstCaps *filter, *result, *tcaps; GstPad *target; target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad)); if (target == NULL) { GST_ERROR_OBJECT (pad, "No target pad set"); return FALSE; } gst_query_parse_caps (query, &filter); switch (kms_element_get_pad_type (element, pad)) { case KMS_ELEMENT_PAD_TYPE_VIDEO: caps = kms_recorder_endpoint_get_caps_from_profile (self, KMS_ELEMENT_PAD_TYPE_VIDEO); result = gst_caps_from_string (KMS_AGNOSTIC_VIDEO_CAPS); break; case KMS_ELEMENT_PAD_TYPE_AUDIO: caps = kms_recorder_endpoint_get_caps_from_profile (self, KMS_ELEMENT_PAD_TYPE_AUDIO); result = gst_caps_from_string (KMS_AGNOSTIC_AUDIO_CAPS); break; default: GST_ERROR_OBJECT (pad, "unknown pad"); g_object_unref (target); return FALSE; } allowed = gst_pad_get_allowed_caps (target); /* make sure we only return results that intersect our padtemplate */ tcaps = gst_pad_get_pad_template_caps (pad); if (tcaps != NULL) { /* Update result caps */ gst_caps_unref (result); if (allowed == NULL) { result = gst_caps_ref (tcaps); } else { result = gst_caps_intersect (allowed, tcaps); } gst_caps_unref (tcaps); } else { GST_WARNING_OBJECT (pad, "Can not get capabilities from pad's template. 
Using agnostic's' caps"); } if (caps == NULL) { GST_ERROR_OBJECT (self, "No caps from profile"); } else { GstElement *appsrc; GstPad *srcpad; gchar *id; id = gst_pad_get_name (pad); KMS_ELEMENT_LOCK (KMS_ELEMENT (self)); appsrc = g_hash_table_lookup (self->priv->srcs, id); g_free (id); if (appsrc == NULL) { GstCaps *aux; KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self)); GST_ERROR_OBJECT (self, "No appsrc attached to pad %" GST_PTR_FORMAT, pad); /* Filter against profile */ GST_WARNING_OBJECT (appsrc, "Using generic profile's caps"); aux = gst_caps_intersect (caps, result); gst_caps_unref (result); result = aux; goto filter_caps; } srcpad = gst_element_get_static_pad (appsrc, "src"); KMS_ELEMENT_UNLOCK (KMS_ELEMENT (self)); /* Get encodebin's caps filtering by profile */ tcaps = gst_pad_peer_query_caps (srcpad, caps); if (tcaps != NULL) { /* Filter against filtered encodebin's caps */ GstCaps *aux; aux = gst_caps_intersect (tcaps, result); gst_caps_unref (result); gst_caps_unref (tcaps); result = aux; } else if (caps != NULL) { /* Filter against profile */ GstCaps *aux; GST_WARNING_OBJECT (appsrc, "Using generic profile's caps"); aux = gst_caps_intersect (caps, result); gst_caps_unref (result); result = aux; } g_object_unref (srcpad); } filter_caps: /* filter against the query filter when needed */ if (filter != NULL) { GstCaps *aux; aux = gst_caps_intersect (result, filter); gst_caps_unref (result); result = aux; } gst_query_set_caps_result (query, result); gst_caps_unref (result); if (allowed != NULL) gst_caps_unref (allowed); if (caps != NULL) gst_caps_unref (caps); g_object_unref (target); return TRUE; }
/* Negotiate output format for a decoded JPEG-2000 image (GStreamer 0.10).
 *
 * Reads dimensions, component count and colour space from the JasPer image
 * (forcing the container-supplied colour space when known), decides whether
 * renegotiation is needed, and — if so — walks the normalized allowed src
 * caps looking for the first RGB/YUV format whose per-component geometry
 * matches the image.  On success caches component offsets/strides, the
 * image size and a scratch line buffer, and sets the new caps on the pad.
 *
 * Returns GST_FLOW_OK, GST_FLOW_NOT_NEGOTIATED when the image is invalid
 * or no format matches, or GST_FLOW_ERROR for unsupported colour spaces
 * (only 3-component, 8-bit unsigned images are accepted).
 *
 * Fix: in 0.10 gst_caps_normalize() takes a const GstCaps * and returns a
 * new caps without consuming its input, so the caps obtained from
 * gst_pad_get_allowed_caps()/gst_caps_copy() were leaked; they are now
 * unreffed after normalization. */
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* jp2c bitstream has no real colour space info (kept in container),
   * so decoder may only pretend to know, where it really does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  if (channels != 3)
    goto not_supported;

  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    if (depth != 8 || sgnd)
      goto not_supported;

    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref, and
       get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  /* avoid lists of fourcc, etc */
  allowed_caps = gst_caps_normalize (caps);
  /* FIX: normalize does not consume its (const) input in 0.10 */
  gst_caps_unref (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);

    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;

    /* match format with validity checks */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      if (dec->cwidth[cmpt] != gst_video_format_get_component_width (format,
              j, width) ||
          dec->cheight[cmpt] != gst_video_format_get_component_height (format,
              j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] = gst_video_format_get_component_offset (dec->format, j,
          dec->width, dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] = gst_video_format_get_row_stride (dec->format, j,
          dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);
    if (dec->buf)
      g_free (dec->buf);
    dec->buf = g_new0 (glong, dec->width);
    caps = gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);
    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);
    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;
    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
/* getcaps for the shape-wipe src pad.
 *
 * Short-circuits to the current caps of this pad or of the video sink pad
 * when either is already negotiated.  Otherwise builds the result by
 * intersecting, in order: the video sink template caps with its peer caps,
 * then this pad's peer caps, then (after pinning width/height to the known
 * video geometry when available) the mask sink peer caps intersected with
 * the mask template — with "format"/"framerate" stripped and the structure
 * renamed to "video/x-raw" so the mask constraints apply to raw video.
 * Caller owns the returned reference; parent ref taken at entry is
 * released before returning.
 *
 * NOTE(review): the `filter` argument is never applied to the result, and
 * the "peer accepted"/"mask sink accepted" log lines print `ret` rather
 * than the peer caps `tmp` — confirm both are intentional. */
static GstCaps * gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter) { GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad)); GstCaps *templ, *ret, *tmp; if (gst_pad_has_current_caps (pad)) return gst_pad_get_current_caps (pad); else if (gst_pad_has_current_caps (self->video_sinkpad)) return gst_pad_get_current_caps (self->video_sinkpad); templ = gst_pad_get_pad_template_caps (self->video_sinkpad); tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL); if (tmp) { ret = gst_caps_intersect (tmp, templ); gst_caps_unref (templ); gst_caps_unref (tmp); } else { ret = templ; } GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret); if (gst_caps_is_empty (ret)) goto done; tmp = gst_pad_peer_query_caps (pad, NULL); GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret); if (tmp) { GstCaps *intersection; intersection = gst_caps_intersect (tmp, ret); gst_caps_unref (tmp); gst_caps_unref (ret); ret = intersection; } GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret); if (gst_caps_is_empty (ret)) goto done; if (self->vinfo.height && self->vinfo.width) { guint i, n; ret = gst_caps_make_writable (ret); n = gst_caps_get_size (ret); for (i = 0; i < n; i++) { GstStructure *s = gst_caps_get_structure (ret, i); gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height", G_TYPE_INT, self->vinfo.height, NULL); } } tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL); GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret); if (tmp) { GstCaps *intersection, *tmp2; guint i, n; tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad); intersection = gst_caps_intersect (tmp, tmp2); gst_caps_unref (tmp); gst_caps_unref (tmp2); tmp = gst_caps_make_writable (intersection); n = gst_caps_get_size (tmp); for (i = 0; i < n; i++) { GstStructure *s = gst_caps_get_structure (tmp, i); gst_structure_remove_fields (s, "format", "framerate", NULL); gst_structure_set_name (s, "video/x-raw"); } intersection = 
gst_caps_intersect (tmp, ret); gst_caps_unref (tmp); gst_caps_unref (ret); ret = intersection; } done: gst_object_unref (self); GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret); return ret; }
/* getcaps for the deinterleave sink pad (GStreamer 0.10 API).
 *
 * Starting from ANY, intersects across every pad of the element: each
 * pad's template caps (with the channel fields forced to self->channels
 * for the queried sink pad, 1 for a queried src pad, and removed for all
 * other pads) and, for pads other than the queried one, the peer caps with
 * channel information stripped.  Peer caps are deliberately not queried on
 * `pad` itself to avoid re-entrancy/deadlock (see inline comment).  Runs
 * under the object lock; releases the parent ref before returning.  Caller
 * owns the returned caps. */
static GstCaps * gst_deinterleave_sink_getcaps (GstPad * pad) { GstDeinterleave *self = GST_DEINTERLEAVE (gst_pad_get_parent (pad)); GstCaps *ret; GList *l; GST_OBJECT_LOCK (self); /* Intersect all of our pad template caps with the peer caps of the pad * to get all formats that are possible up- and downstream. * * For the pad for which the caps are requested we don't remove the channel * informations as they must be in the returned caps and incompatibilities * will be detected here already */ ret = gst_caps_new_any (); for (l = GST_ELEMENT (self)->pads; l != NULL; l = l->next) { GstPad *ourpad = GST_PAD (l->data); GstCaps *peercaps = NULL, *ourcaps; ourcaps = gst_caps_copy (gst_pad_get_pad_template_caps (ourpad)); if (pad == ourpad) { if (GST_PAD_DIRECTION (pad) == GST_PAD_SINK) __set_channels (ourcaps, self->channels); else __set_channels (ourcaps, 1); } else { __remove_channels (ourcaps); /* Only ask for peer caps for other pads than pad * as otherwise gst_pad_peer_get_caps() might call * back into this function and deadlock */ peercaps = gst_pad_peer_get_caps (ourpad); } /* If the peer exists and has caps add them to the intersection, * otherwise assume that the peer accepts everything */ if (peercaps) { GstCaps *intersection; GstCaps *oldret = ret; __remove_channels (peercaps); intersection = gst_caps_intersect (peercaps, ourcaps); ret = gst_caps_intersect (ret, intersection); gst_caps_unref (intersection); gst_caps_unref (peercaps); gst_caps_unref (oldret); } else { GstCaps *oldret = ret; ret = gst_caps_intersect (ret, ourcaps); gst_caps_unref (oldret); } gst_caps_unref (ourcaps); } GST_OBJECT_UNLOCK (self); gst_object_unref (self); GST_DEBUG_OBJECT (pad, "Intersected caps to %" GST_PTR_FORMAT, ret); return ret; }
/* Configure the OMX H.264 encoder's output port for AVC and, when the
 * downstream peer constrains them, the AVC profile and level.
 *
 * Sets OMX_VIDEO_CodingAVC on the output port, reads the component's
 * current profile/level parameter, maps the "profile"/"level" strings of
 * the first downstream-preferred caps structure onto OMX enum values, and
 * writes the parameter back.  Returns FALSE on fatal errors (port update
 * failure, empty peer caps, unsupported profile/level string); returns
 * TRUE otherwise, including when the component simply does not support
 * profile/level configuration.
 *
 * Fixes: "&param" had been mangled to "¶m" (HTML-entity mojibake) in the
 * three OMX parameter calls; and the template caps passed as the filter to
 * gst_pad_peer_query_caps() were leaked (the filter argument is
 * transfer-none while gst_pad_get_pad_template_caps() returns a new ref). */
static gboolean
gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
    GstVideoCodecState * state)
{
  GstOMXH264Enc *self = GST_OMX_H264_ENC (enc);
  GstCaps *peercaps, *templ_caps;
  OMX_PARAM_PORTDEFINITIONTYPE port_def;
  OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
  OMX_ERRORTYPE err;
  const gchar *profile_string, *level_string;

  /* advertise AVC on the encoder output port */
  gst_omx_port_get_port_definition (GST_OMX_VIDEO_ENC (self)->enc_out_port,
      &port_def);
  port_def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
  err =
      gst_omx_port_update_port_definition (GST_OMX_VIDEO_ENC
      (self)->enc_out_port, &port_def);
  if (err != OMX_ErrorNone)
    return FALSE;

  GST_OMX_INIT_STRUCT (&param);
  param.nPortIndex = GST_OMX_VIDEO_ENC (self)->enc_out_port->index;

  err = gst_omx_component_get_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamVideoProfileLevelCurrent, &param);
  if (err != OMX_ErrorNone) {
    GST_WARNING_OBJECT (self,
        "Setting profile/level not supported by component");
    return TRUE;
  }

  /* ask downstream which of our template caps it prefers */
  templ_caps = gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (enc));
  peercaps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (enc),
      templ_caps);
  gst_caps_unref (templ_caps);
  if (peercaps) {
    GstStructure *s;

    if (gst_caps_is_empty (peercaps)) {
      gst_caps_unref (peercaps);
      GST_ERROR_OBJECT (self, "Empty caps");
      return FALSE;
    }

    s = gst_caps_get_structure (peercaps, 0);
    profile_string = gst_structure_get_string (s, "profile");
    if (profile_string) {
      if (g_str_equal (profile_string, "baseline")) {
        param.eProfile = OMX_VIDEO_AVCProfileBaseline;
      } else if (g_str_equal (profile_string, "main")) {
        param.eProfile = OMX_VIDEO_AVCProfileMain;
      } else if (g_str_equal (profile_string, "extended")) {
        param.eProfile = OMX_VIDEO_AVCProfileExtended;
      } else if (g_str_equal (profile_string, "high")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh;
      } else if (g_str_equal (profile_string, "high-10")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh10;
      } else if (g_str_equal (profile_string, "high-4:2:2")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh422;
      } else if (g_str_equal (profile_string, "high-4:4:4")) {
        param.eProfile = OMX_VIDEO_AVCProfileHigh444;
      } else {
        goto unsupported_profile;
      }
    }
    level_string = gst_structure_get_string (s, "level");
    if (level_string) {
      if (g_str_equal (level_string, "1")) {
        param.eLevel = OMX_VIDEO_AVCLevel1;
      } else if (g_str_equal (level_string, "1b")) {
        param.eLevel = OMX_VIDEO_AVCLevel1b;
      } else if (g_str_equal (level_string, "1.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel11;
      } else if (g_str_equal (level_string, "1.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel12;
      } else if (g_str_equal (level_string, "1.3")) {
        param.eLevel = OMX_VIDEO_AVCLevel13;
      } else if (g_str_equal (level_string, "2")) {
        param.eLevel = OMX_VIDEO_AVCLevel2;
      } else if (g_str_equal (level_string, "2.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel21;
      } else if (g_str_equal (level_string, "2.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel22;
      } else if (g_str_equal (level_string, "3")) {
        param.eLevel = OMX_VIDEO_AVCLevel3;
      } else if (g_str_equal (level_string, "3.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel31;
      } else if (g_str_equal (level_string, "3.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel32;
      } else if (g_str_equal (level_string, "4")) {
        param.eLevel = OMX_VIDEO_AVCLevel4;
      } else if (g_str_equal (level_string, "4.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel41;
      } else if (g_str_equal (level_string, "4.2")) {
        param.eLevel = OMX_VIDEO_AVCLevel42;
      } else if (g_str_equal (level_string, "5")) {
        param.eLevel = OMX_VIDEO_AVCLevel5;
      } else if (g_str_equal (level_string, "5.1")) {
        param.eLevel = OMX_VIDEO_AVCLevel51;
      } else {
        goto unsupported_level;
      }
    }
    gst_caps_unref (peercaps);
  }

  err = gst_omx_component_set_parameter (GST_OMX_VIDEO_ENC (self)->enc,
      OMX_IndexParamVideoProfileLevelCurrent, &param);
  if (err == OMX_ErrorUnsupportedIndex) {
    GST_WARNING_OBJECT (self,
        "Setting profile/level not supported by component");
  } else if (err != OMX_ErrorNone) {
    GST_ERROR_OBJECT (self,
        "Error setting profile %u and level %u: %s (0x%08x)",
        (guint) param.eProfile, (guint) param.eLevel,
        gst_omx_error_to_string (err), err);
    return FALSE;
  }

  return TRUE;

unsupported_profile:
  GST_ERROR_OBJECT (self, "Unsupported profile %s", profile_string);
  gst_caps_unref (peercaps);
  return FALSE;

unsupported_level:
  GST_ERROR_OBJECT (self, "Unsupported level %s", level_string);
  gst_caps_unref (peercaps);
  return FALSE;
}
/* Returns the caps of this pad's template, wrapped in a CapsPtr.
 * NOTE(review): the `false` flag presumably tells CapsPtr::wrap not to take
 * an extra reference, which matches the 0.10 gst_pad_get_pad_template_caps()
 * returning a borrowed ref — confirm against the CapsPtr::wrap contract and
 * the GStreamer version in use (1.0 returns a new ref). */
CapsPtr Pad::padTemplateCaps() const { return CapsPtr::wrap(gst_pad_get_pad_template_caps(object<GstPad>()), false); }
/* Parse the fully-buffered MIDI data and prepare playback.
 *
 * Drains the adapter (WildMidi_OpenBuffer takes ownership of the memory),
 * configures WildMidi volume/quality options (API differs before/after
 * wildmidi 0.2.2, hence the #ifdefs), records the total sample count,
 * pushes a copy of the src template caps as the negotiated caps, resets
 * the internal sample-based segment, and sends a newsegment event.
 * The object lock guards the song/option setup; the open_failed error path
 * drops the lock before posting the element error.
 * Returns GST_FLOW_OK or GST_FLOW_ERROR when the data cannot be parsed. */
static GstFlowReturn gst_wildmidi_parse_song (GstWildmidi * wildmidi) { struct _WM_Info *info; GstCaps *outcaps; guint8 *data; guint size; GST_DEBUG_OBJECT (wildmidi, "Parsing song"); size = gst_adapter_available (wildmidi->adapter); data = gst_adapter_take (wildmidi->adapter, size); /* this method takes our memory block */ GST_OBJECT_LOCK (wildmidi); wildmidi->song = WildMidi_OpenBuffer (data, size); if (!wildmidi->song) goto open_failed; #ifdef HAVE_WILDMIDI_0_2_2 WildMidi_LoadSamples (wildmidi->song); #endif #ifdef HAVE_WILDMIDI_0_2_2 WildMidi_SetOption (wildmidi->song, WM_MO_LINEAR_VOLUME, wildmidi->linear_volume); WildMidi_SetOption (wildmidi->song, WM_MO_EXPENSIVE_INTERPOLATION, wildmidi->high_quality); #else WildMidi_SetOption (wildmidi->song, WM_MO_LOG_VOLUME, !wildmidi->linear_volume); WildMidi_SetOption (wildmidi->song, WM_MO_ENHANCED_RESAMPLING, wildmidi->high_quality); #endif info = WildMidi_GetInfo (wildmidi->song); GST_OBJECT_UNLOCK (wildmidi); wildmidi->o_len = info->approx_total_samples; outcaps = gst_caps_copy (gst_pad_get_pad_template_caps (wildmidi->srcpad)); gst_pad_set_caps (wildmidi->srcpad, outcaps); gst_caps_unref (outcaps); /* we keep an internal segment in samples */ gst_segment_init (wildmidi->o_segment, GST_FORMAT_DEFAULT); gst_pad_push_event (wildmidi->srcpad, gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME)); GST_DEBUG_OBJECT (wildmidi, "Parsing song done"); return GST_FLOW_OK; /* ERRORS */ open_failed: { GST_OBJECT_UNLOCK (wildmidi); GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL), ("Unable to parse midi data")); return GST_FLOW_ERROR; } }
/* Compute the caps the payloader can accept on its sink pad, derived from
 * what the RTP peer downstream advertises. For fixed H263-2000 peer caps,
 * the RFC 4629 profile/level is translated into concrete video/x-h263
 * restrictions (h263version, annex-* flags, size/framerate limits per
 * H.263 Annex X); for plain H263-1998 caps, the advertised annex flags and
 * per-resolution MPI parameters are translated instead.
 *
 * FIX: the two early returns previously leaked 'peercaps', which is an
 * owned reference from gst_pad_peer_query_caps(); it is now unreffed. */
static GstCaps *
gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
    GstCaps * filter)
{
  GstRtpH263PPay *rtph263ppay;
  GstCaps *caps = NULL, *templ;
  GstCaps *peercaps = NULL;
  GstCaps *intersect = NULL;
  guint i;

  rtph263ppay = GST_RTP_H263P_PAY (payload);

  peercaps =
      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), filter);

  /* if we're just outputting to udpsink or fakesink or so, we should also
   * accept any input compatible with our sink template caps */
  if (!peercaps || gst_caps_is_any (peercaps)) {
    if (peercaps)
      gst_caps_unref (peercaps);
    return
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
  }

  /* We basically need to differentiate two use-cases here: One where there's
   * a capsfilter after the payloader with caps created from an SDP; in this
   * case the filter caps are fixed and we want to signal to an encoder what
   * we want it to produce. The second case is simply payloader ! depayloader
   * where we are dealing with the depayloader's template caps. In this case
   * we should accept any input compatible with our sink template caps. */
  if (!gst_caps_is_fixed (peercaps)) {
    gst_caps_unref (peercaps);
    return
        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
  }

  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  intersect = gst_caps_intersect (peercaps, templ);
  gst_caps_unref (peercaps);
  gst_caps_unref (templ);

  if (gst_caps_is_empty (intersect))
    return intersect;

  caps = gst_caps_new_empty ();
  for (i = 0; i < gst_caps_get_size (intersect); i++) {
    GstStructure *s = gst_caps_get_structure (intersect, i);
    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");

    if (!strcmp (encoding_name, "H263-2000")) {
      const gchar *profile_str = gst_structure_get_string (s, "profile");
      const gchar *level_str = gst_structure_get_string (s, "level");
      int profile = 0;
      int level = 0;

      if (profile_str && level_str) {
        /* note: this 'i' deliberately shadows the outer loop index */
        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
            v = FALSE;
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            NULL);

        profile = atoi (profile_str);
        level = atoi (level_str);

        /* These profiles are defined in the H.263 Annex X */
        switch (profile) {
          case 0:
            /* The Baseline Profile (Profile 0) */
            break;
          case 1:
            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
             * (Profile 1)
             * Baseline + Annexes I, J, L.4 and T */
            i = j = l = t = TRUE;
            break;
          case 2:
            /* Version 1 Backward-Compatibility Profile (Profile 2)
             * Baseline + Annex F */
            i = j = l = t = f = TRUE;
            break;
          case 3:
            /* Version 2 Interactive and Streaming Wireless Profile
             * Baseline + Annexes I, J, T */
            i = j = t = TRUE;
            break;
          case 4:
            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
             * Baseline + Annexes I, J, T, V, W.6.3.8, */
            /* Missing W.6.3.8 */
            i = j = t = v = TRUE;
            break;
          case 5:
            /* Conversational High Compression Profile (Profile 5)
             * Baseline + Annexes F, I, J, L.4, T, D, U */
            /* Missing D, U */
            f = i = j = l = t = TRUE;
            break;
          case 6:
            /* Conversational Internet Profile (Profile 6)
             * Baseline + Annexes F, I, J, L.4, T, D, U and
             * K with arbitratry slice ordering */
            /* Missing D, U, K with arbitratry slice ordering */
            f = i = j = l = t = TRUE;
            break;
          case 7:
            /* Conversational Interlace Profile (Profile 7)
             * Baseline + Annexes F, I, J, L.4, T, D, U, W.6.3.11 */
            /* Missing D, U, W.6.3.11 */
            f = i = j = l = t = TRUE;
            break;
          case 8:
            /* High Latency Profile (Profile 8)
             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
             * K with arbitratry slice ordering */
            /* Missing D, U, P.5, O.1.1 */
            f = i = j = l = t = TRUE;
            break;
        }

        if (f || i || j || t || l || v) {
          GValue list = { 0 };
          GValue vstr = { 0 };

          g_value_init (&list, GST_TYPE_LIST);
          g_value_init (&vstr, G_TYPE_STRING);

          g_value_set_static_string (&vstr, "h263");
          gst_value_list_append_value (&list, &vstr);
          g_value_set_static_string (&vstr, "h263p");
          gst_value_list_append_value (&list, &vstr);

          if (l || v) {
            g_value_set_static_string (&vstr, "h263pp");
            gst_value_list_append_value (&list, &vstr);
          }
          g_value_unset (&vstr);

          gst_structure_set_value (new_s, "h263version", &list);
          g_value_unset (&list);
        } else {
          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
        }

        /* annexes that the profile does NOT enable must be advertised FALSE */
        if (!f)
          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!i)
          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!j)
          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!t)
          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!l)
          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
        if (!v)
          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);

        /* map the H.263 level onto maximum picture size and frame rate;
         * gst_caps_merge_structure takes ownership of the structures */
        if (level <= 10 || level == 45) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 20) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 176,
              "height", GST_TYPE_INT_RANGE, 1, 144,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 40) {
          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, new_s);
        } else if (level <= 50) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 352,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 60) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 288,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 240,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else if (level <= 70) {
          GstStructure *s_copy = gst_structure_copy (new_s);

          gst_structure_set (new_s,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 576,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
          caps = gst_caps_merge_structure (caps, new_s);

          gst_structure_set (s_copy,
              "width", GST_TYPE_INT_RANGE, 1, 720,
              "height", GST_TYPE_INT_RANGE, 1, 480,
              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
          caps = gst_caps_merge_structure (caps, s_copy);
        } else {
          caps = gst_caps_merge_structure (caps, new_s);
        }
      } else {
        GstStructure *new_s = gst_structure_new ("video/x-h263",
            "variant", G_TYPE_STRING, "itu",
            "h263version", G_TYPE_STRING, "h263",
            NULL);

        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
            " for H263-2000, defaulting to baseline H263");

        caps = gst_caps_merge_structure (caps, new_s);
      }
    } else {
      /* H263-1998: translate the SDP-style annex flags and MPI parameters */
      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
      /* FIXME: ffmpeg support the Appendix K too, how do we express it ?
       *   guint k;
       */
      const gchar *str;
      GstStructure *new_s = gst_structure_new ("video/x-h263",
          "variant", G_TYPE_STRING, "itu",
          NULL);
      gboolean added = FALSE;

      str = gst_structure_get_string (s, "f");
      if (str && !strcmp (str, "1"))
        f = TRUE;

      str = gst_structure_get_string (s, "i");
      if (str && !strcmp (str, "1"))
        i = TRUE;

      str = gst_structure_get_string (s, "j");
      if (str && !strcmp (str, "1"))
        j = TRUE;

      str = gst_structure_get_string (s, "t");
      if (str && !strcmp (str, "1"))
        t = TRUE;

      if (f || i || j || t) {
        GValue list = { 0 };
        GValue vstr = { 0 };

        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&vstr, G_TYPE_STRING);

        g_value_set_static_string (&vstr, "h263");
        gst_value_list_append_value (&list, &vstr);
        g_value_set_static_string (&vstr, "h263p");
        gst_value_list_append_value (&list, &vstr);
        g_value_unset (&vstr);

        gst_structure_set_value (new_s, "h263version", &list);
        g_value_unset (&list);
      } else {
        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
      }

      if (!f)
        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!i)
        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!j)
        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
      if (!t)
        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);

      str = gst_structure_get_string (s, "custom");
      if (str) {
        unsigned int xmax, ymax, mpi;
        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
          /* NOTE(review): RFC 4629 requires custom picture width/height to
           * be divisible by 4, so this condition looks inverted (it accepts
           * sizes that are NOT multiples of 4) -- verify before changing,
           * behavior kept as-is here */
          if (xmax % 4 && ymax % 4 && mpi >= 1 && mpi <= 32) {
            caps = caps_append (caps, new_s, xmax, ymax, mpi);
            added = TRUE;
          } else {
            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
                " %u x %u at %u, ignoring", xmax, ymax, mpi);
          }
        } else {
          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
              " ignoring", str);
        }
      }

      str = gst_structure_get_string (s, "16cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 1408, 1152, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "4cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 704, 576, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "cif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 352, 288, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "qcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 176, 144, mpi);
        added = TRUE;
      }

      str = gst_structure_get_string (s, "sqcif");
      if (str) {
        int mpi = atoi (str);
        caps = caps_append (caps, new_s, 128, 96, mpi);
        added = TRUE;
      }

      /* caps_append copies new_s, so free it unless it was merged directly */
      if (added)
        gst_structure_free (new_s);
      else
        caps = gst_caps_merge_structure (caps, new_s);
    }
  }

  gst_caps_unref (intersect);

  return caps;
}
/* Return the caps the Opus encoder accepts on its sink pad: the template
 * sink caps, with the channel count clamped to 1-2 unless downstream
 * permits (or does not constrain) multistream output. */
static GstCaps *
gst_opus_enc_sink_getcaps (GstAudioEncoder * benc)
{
  GstOpusEnc *enc = GST_OPUS_ENC (benc);
  GstCaps *result;
  GstCaps *downstream_caps;
  GstCaps *joint;
  guint idx;
  gboolean allow_multistream = FALSE;

  GST_DEBUG_OBJECT (enc, "sink getcaps");

  downstream_caps = gst_pad_peer_get_caps (GST_AUDIO_ENCODER_SRC_PAD (benc));
  if (downstream_caps == NULL) {
    GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
    return
        gst_caps_copy (gst_pad_get_pad_template_caps
        (GST_AUDIO_ENCODER_SINK_PAD (benc)));
  }

  /* restrict what downstream offers to what our src template can produce */
  joint = gst_caps_intersect (downstream_caps,
      gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
  gst_caps_unref (downstream_caps);

  if (gst_caps_is_empty (joint))
    return joint;

  /* multistream is allowed when any downstream structure either requests it
   * explicitly or leaves the field unconstrained */
  for (idx = 0; idx < gst_caps_get_size (joint); idx++) {
    GstStructure *st = gst_caps_get_structure (joint, idx);
    gboolean multistream;

    if (!gst_structure_get_boolean (st, "multistream", &multistream))
      allow_multistream = TRUE;
    else if (multistream)
      allow_multistream = TRUE;
  }
  gst_caps_unref (joint);

  result =
      gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
          (benc)));

  if (!allow_multistream) {
    GValue channel_range = { 0 };

    g_value_init (&channel_range, GST_TYPE_INT_RANGE);
    gst_value_set_int_range (&channel_range, 1, 2);
    for (idx = 0; idx < gst_caps_get_size (result); idx++) {
      GstStructure *st = gst_caps_get_structure (result, idx);

      gst_structure_set_value (st, "channels", &channel_range);
    }
    g_value_unset (&channel_range);
  }

  GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, result);
  return result;
}
/* Sink pad setcaps: parse the incoming video format, cache per-component
 * layout (widths, strides, sampling factors) for the JPEG encoder, and
 * propagate matching caps downstream. Returns FALSE if the caps cannot be
 * parsed as raw video. */
static gboolean
gst_jpegenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstJpegEnc *enc = GST_JPEGENC (gst_pad_get_parent (pad));
  GstVideoFormat format;
  gint width, height;
  gint fps_num, fps_den;
  gint par_num, par_den;
  gint i;
  GstCaps *othercaps;
  gboolean ret;

  /* get info from caps */
  if (!gst_video_format_parse_caps (caps, &format, &width, &height))
    goto refuse_caps;
  /* optional; pass along if present */
  fps_num = fps_den = -1;
  par_num = par_den = -1;
  gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &par_num, &par_den);

  if (width == enc->width && height == enc->height && enc->format == format
      && fps_num == enc->fps_num && fps_den == enc->fps_den
      && par_num == enc->par_num && par_den == enc->par_den) {
    /* FIX: this early return previously leaked the element reference taken
     * by gst_pad_get_parent() above on every no-op renegotiation */
    gst_object_unref (enc);
    return TRUE;
  }

  /* store input description */
  enc->format = format;
  enc->width = width;
  enc->height = height;
  enc->fps_num = fps_num;
  enc->fps_den = fps_den;
  enc->par_num = par_num;
  enc->par_den = par_den;

  /* prepare a cached image description */
  enc->channels = 3 + (gst_video_format_has_alpha (format) ? 1 : 0);
  /* ... but any alpha is disregarded in encoding */
  if (gst_video_format_is_gray (format))
    enc->channels = 1;
  else
    enc->channels = 3;
  enc->h_max_samp = 0;
  enc->v_max_samp = 0;
  for (i = 0; i < enc->channels; ++i) {
    enc->cwidth[i] = gst_video_format_get_component_width (format, i, width);
    enc->cheight[i] = gst_video_format_get_component_height (format, i, height);
    enc->offset[i] = gst_video_format_get_component_offset (format, i, width,
        height);
    enc->stride[i] = gst_video_format_get_row_stride (format, i, width);
    enc->inc[i] = gst_video_format_get_pixel_stride (format, i);
    enc->h_samp[i] = GST_ROUND_UP_4 (width) / enc->cwidth[i];
    enc->h_max_samp = MAX (enc->h_max_samp, enc->h_samp[i]);
    enc->v_samp[i] = GST_ROUND_UP_4 (height) / enc->cheight[i];
    enc->v_max_samp = MAX (enc->v_max_samp, enc->v_samp[i]);
  }
  /* samp should only be 1, 2 or 4 */
  g_assert (enc->h_max_samp <= 4);
  g_assert (enc->v_max_samp <= 4);
  /* now invert */
  /* maximum is invariant, as one of the components should have samp 1 */
  for (i = 0; i < enc->channels; ++i) {
    enc->h_samp[i] = enc->h_max_samp / enc->h_samp[i];
    enc->v_samp[i] = enc->v_max_samp / enc->v_samp[i];
  }
  /* planar input iff every component advances one byte per pixel */
  enc->planar = (enc->inc[0] == 1 && enc->inc[1] == 1 && enc->inc[2] == 1);

  /* build src caps: template caps narrowed to the negotiated geometry */
  othercaps = gst_caps_copy (gst_pad_get_pad_template_caps (enc->srcpad));
  gst_caps_set_simple (othercaps,
      "width", G_TYPE_INT, enc->width, "height", G_TYPE_INT, enc->height, NULL);
  if (enc->fps_den > 0)
    gst_caps_set_simple (othercaps,
        "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_den, NULL);
  if (enc->par_den > 0)
    gst_caps_set_simple (othercaps,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, enc->par_num, enc->par_den,
        NULL);

  ret = gst_pad_set_caps (enc->srcpad, othercaps);
  gst_caps_unref (othercaps);

  if (ret)
    gst_jpegenc_resync (enc);

  gst_object_unref (enc);

  return ret;

  /* ERRORS */
refuse_caps:
  {
    GST_WARNING_OBJECT (enc, "refused caps %" GST_PTR_FORMAT, caps);
    gst_object_unref (enc);
    return FALSE;
  }
}
/* Query the Phoenix frame grabber for its currently configured destination
 * format/ROI and translate that into GStreamer caps (video/x-bayer or raw
 * video). Falls back to the pad template caps when no camera handle is
 * open; returns NULL on a driver query failure or unsupported format. */
static GstCaps *
gst_phoenixsrc_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  etStat eStat = PHX_OK;        /* Status variable */
  /* NOTE(review): eParamValue is never read or written below -- looks like
   * dead state kept from a template; confirm before removing */
  etParamValue eParamValue = PHX_INVALID_PARAMVALUE;
  ui32 dwParamValue = 0;
  guint32 phx_format;
  gint width, height;
  gint bpp, depth, endianness;
  GstVideoFormat videoFormat;
  gboolean is_gray16 = FALSE, is_bayer = FALSE;
  GstVideoInfo vinfo;
  GstCaps *caps;

  /* no camera opened yet: advertise everything the template allows */
  if (!src->hCamera) {
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
  }

  /* Create video info */
  gst_video_info_init (&vinfo);

  /* Get format (mono, Bayer, RBG, etc.) */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_FORMAT, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  phx_format = dwParamValue;

  /* Get endianness */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_ENDIAN, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  endianness =
      (dwParamValue == PHX_DST_LITTLE_ENDIAN) ? G_LITTLE_ENDIAN : G_BIG_ENDIAN;

  /* get width */
  eStat =
      PHX_ParameterGet (src->hCamera, PHX_ROI_XLENGTH_SCALED, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  width = dwParamValue;

  /* get height */
  eStat =
      PHX_ParameterGet (src->hCamera, PHX_ROI_YLENGTH_SCALED, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  height = dwParamValue;

  /* classify the driver format: direct GstVideoFormat, >8-bit grayscale
   * (stored in 16-bit containers), or Bayer mosaic */
  switch (phx_format) {
    case PHX_DST_FORMAT_Y8:
      videoFormat = GST_VIDEO_FORMAT_GRAY8;
      break;
    case PHX_DST_FORMAT_Y10:
      bpp = 10;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y12:
      bpp = 12;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y14:
      bpp = 14;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y16:
      bpp = 16;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_BAY8:
      bpp = 8;
      depth = 8;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY10:
      bpp = 10;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY12:
      bpp = 12;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY14:
      bpp = 14;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY16:
      bpp = 16;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_RGB15:
      videoFormat = GST_VIDEO_FORMAT_RGB15;
      break;
    case PHX_DST_FORMAT_RGB16:
      videoFormat = GST_VIDEO_FORMAT_RGB16;
      break;
    case PHX_DST_FORMAT_RGB24:
      videoFormat = GST_VIDEO_FORMAT_RGB;
      break;
    case PHX_DST_FORMAT_RGB32:
      /* FIXME: what is the format of this? */
    case PHX_DST_FORMAT_XRGB32:
      videoFormat = GST_VIDEO_FORMAT_xRGB;
      break;
    default:
      videoFormat = GST_VIDEO_FORMAT_UNKNOWN;
  }

  if (is_gray16)
    videoFormat =
        (endianness == G_LITTLE_ENDIAN) ? GST_VIDEO_FORMAT_GRAY16_LE :
        GST_VIDEO_FORMAT_GRAY16_BE;

  if (is_bayer) {
    const gchar *bay_fmt;

    /* the Bayer caps also need the sensor's color filter arrangement */
    eStat = PHX_ParameterGet (src->hCamera, PHX_CAM_SRC_COL, &dwParamValue);
    if (PHX_OK != eStat)
      goto ResourceSettingsError;

    switch (dwParamValue) {
      case PHX_CAM_SRC_BAY_RGGB:
        bay_fmt = (depth == 16) ? "rggb16" : "rggb";
        break;
      case PHX_CAM_SRC_BAY_GRBG:
        bay_fmt = (depth == 16) ? "grbg16" : "grbg";
        break;
      case PHX_CAM_SRC_BAY_GBRG:
        bay_fmt = (depth == 16) ? "gbrg16" : "gbrg";
        break;
      case PHX_CAM_SRC_BAY_BGGR:
        bay_fmt = (depth == 16) ? "bggr16" : "bggr";
        break;
      default:
        GST_ERROR_OBJECT (src, "Unknown PHX_CAM_SRC_COL=%d", dwParamValue);
        goto Error;
    }

    /* depth is always 8 or 16 for the Bayer cases set above */
    if (depth == 8) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    } else if (depth == 16) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "bpp", G_TYPE_INT, bpp,
          "endianness", G_TYPE_INT, endianness,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }
  } else if (videoFormat != GST_VIDEO_FORMAT_UNKNOWN) {
    vinfo.finfo = gst_video_format_get_info (videoFormat);
    vinfo.width = width;
    vinfo.height = height;

    caps = gst_video_info_to_caps (&vinfo);

    if (is_gray16) {
      /* advertise the true significant bit count on top of the 16-bit caps */
      GValue val = G_VALUE_INIT;
      GstStructure *s = gst_caps_get_structure (caps, 0);
      g_value_init (&val, G_TYPE_INT);
      g_value_set_int (&val, bpp);
      gst_structure_set_value (s, "bpp", &val);
      g_value_unset (&val);
    }
  } else {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        (("Unknown or unsupported color format.")), (NULL));
    goto Error;
  }

  /* get buffer size; width (in bytes) and height (in lines) */
  /* NOTE(review): if one of these queries fails, the 'caps' built above is
   * leaked on the error path -- confirm and consider unreffing there */
  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_XLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  src->phx_stride = dwParamValue;
  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_YLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  /* TODO: should we be using PHX_BUF_DST_YLENGTH or PHX_ROI_YLENGTH_SCALED
   * for height? */
  g_assert (dwParamValue == height);

  GST_DEBUG_OBJECT (src, "The caps before filtering are %" GST_PTR_FORMAT,
      caps);

  if (filter) {
    GstCaps *tmp = gst_caps_intersect (caps, filter);
    gst_caps_unref (caps);
    caps = tmp;
  }

  GST_DEBUG_OBJECT (src, "The caps after filtering are %" GST_PTR_FORMAT, caps);

  return caps;

ResourceSettingsError:
  GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS,
      (("Failed to get Phoenix parameters.")), (NULL));

Error:
  return NULL;
}
/* Negotiate output caps with downstream, (re)create the VideoToolbox
 * decompression session when the format changed, and manage the GL texture
 * cache when downstream wants GLMemory output. Returns FALSE if no session
 * could be created. */
static gboolean
gst_vtdec_negotiate (GstVideoDecoder * decoder)
{
  GstVideoCodecState *output_state = NULL;
  GstCaps *peercaps = NULL, *caps = NULL, *templcaps = NULL, *prevcaps = NULL;
  GstVideoFormat format;
  GstStructure *structure;
  const gchar *s;
  GstVtdec *vtdec;
  OSStatus err = noErr;
  GstCapsFeatures *features = NULL;
  /* FIX: was uninitialized; it is read below even when the negotiated caps
   * carry no features, which was an uninitialized-read */
  gboolean output_textures = FALSE;

  vtdec = GST_VTDEC (decoder);
  if (vtdec->session)
    gst_vtdec_push_frames_if_needed (vtdec, TRUE, FALSE);

  output_state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (vtdec));
  if (output_state) {
    prevcaps = gst_caps_ref (output_state->caps);
    gst_video_codec_state_unref (output_state);
  }

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL);
  if (prevcaps && gst_caps_can_intersect (prevcaps, peercaps)) {
    /* The hardware decoder can become (temporarily) unavailable across
     * VTDecompressionSessionCreate/Destroy calls. So if the currently
     * configured caps are still accepted by downstream we keep them so we
     * don't have to destroy and recreate the session. */
    GST_INFO_OBJECT (vtdec,
        "current and peer caps are compatible, keeping current caps");
    caps = gst_caps_ref (prevcaps);
  } else {
    templcaps =
        gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (decoder));
    caps =
        gst_caps_intersect_full (peercaps, templcaps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (templcaps);
  }
  gst_caps_unref (peercaps);

  /* pick the first (preferred) structure and extract the output format */
  caps = gst_caps_truncate (gst_caps_make_writable (caps));
  structure = gst_caps_get_structure (caps, 0);
  s = gst_structure_get_string (structure, "format");
  format = gst_video_format_from_string (s);
  features = gst_caps_get_features (caps, 0);
  if (features)
    features = gst_caps_features_copy (features);

  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
      format, vtdec->video_info.width, vtdec->video_info.height,
      vtdec->input_state);
  output_state->caps = gst_video_info_to_caps (&output_state->info);
  if (features) {
    gst_caps_set_features (output_state->caps, 0, features);
    output_textures =
        gst_caps_features_contains (features,
        GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
    if (output_textures)
      gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
          GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
          GST_GL_TEXTURE_TARGET_2D_STR,
#endif
          NULL);
  }
  gst_caps_unref (caps);

  if (!prevcaps || !gst_caps_is_equal (prevcaps, output_state->caps)) {
    gboolean renegotiating = vtdec->session != NULL;

    GST_INFO_OBJECT (vtdec,
        "negotiated output format %" GST_PTR_FORMAT " previous %"
        GST_PTR_FORMAT, output_state->caps, prevcaps);

    if (vtdec->session)
      gst_vtdec_invalidate_session (vtdec);

    /* try hardware decoding first; fall back to software when the hw
     * decoder went away between sessions */
    err = gst_vtdec_create_session (vtdec, format, TRUE);
    if (err == noErr) {
      GST_INFO_OBJECT (vtdec, "using hardware decoder");
    } else if (err == kVTVideoDecoderNotAvailableNowErr && renegotiating) {
      GST_WARNING_OBJECT (vtdec, "hw decoder not available anymore");
      err = gst_vtdec_create_session (vtdec, format, FALSE);
    }

    if (err != noErr) {
      GST_ELEMENT_ERROR (vtdec, RESOURCE, FAILED, (NULL),
          ("VTDecompressionSessionCreate returned %d", (int) err));
    }
  }

  /* downstream no longer wants textures: drop the cache */
  if (vtdec->texture_cache != NULL && !output_textures) {
    gst_video_texture_cache_free (vtdec->texture_cache);
    vtdec->texture_cache = NULL;
  }

  if (err == noErr && output_textures) {
    /* call this regardless of whether caps have changed or not since a new
     * local context could have become available */
    gst_gl_context_helper_ensure_context (vtdec->ctxh);

    GST_INFO_OBJECT (vtdec, "pushing textures, context %p old context %p",
        vtdec->ctxh->context,
        vtdec->texture_cache ? vtdec->texture_cache->ctx : NULL);

    /* the GL context changed: the cached textures are no longer valid */
    if (vtdec->texture_cache
        && vtdec->texture_cache->ctx != vtdec->ctxh->context) {
      gst_video_texture_cache_free (vtdec->texture_cache);
      vtdec->texture_cache = NULL;
    }
    if (!vtdec->texture_cache)
      setup_texture_cache (vtdec, vtdec->ctxh->context);
  }

  if (prevcaps)
    gst_caps_unref (prevcaps);

  if (err != noErr)
    return FALSE;

  return GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->negotiate (decoder);
}
/* Chain function of the FLI/FLC decoder: accumulate input in the adapter,
 * first parse the file header (configuring caps, timing and scratch
 * buffers), then decode and push one RGBA video buffer per complete frame
 * chunk.
 *
 * FIX: the wrong_type error path called gst_object_unref() on 'parent';
 * a GstPadChainFunction does not own its parent (transfer none), so that
 * unref could prematurely destroy the element. Removed. Also dropped dead
 * commented-out 0.10-era buffer-allocation code and the redundant NULL
 * guard around g_free(). */
static GstFlowReturn
gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstCaps *caps;
  guint avail;
  GstFlowReturn res = GST_FLOW_OK;
  GstFlxDec *flxdec;
  FlxHeader *flxh;

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
  flxdec = (GstFlxDec *) parent;
  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);

  gst_adapter_push (flxdec->adapter, buf);
  avail = gst_adapter_available (flxdec->adapter);

  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
    if (avail >= FlxHeaderSize) {
      const guint8 *data = gst_adapter_map (flxdec->adapter, FlxHeaderSize);
      GstCaps *templ;

      memcpy ((gchar *) & flxdec->hdr, data, FlxHeaderSize);
      FLX_HDR_FIX_ENDIANNESS (&(flxdec->hdr));
      gst_adapter_unmap (flxdec->adapter);
      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);

      flxh = &flxdec->hdr;

      /* check header */
      if (flxh->type != FLX_MAGICHDR_FLI &&
          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX)
        goto wrong_type;

      GST_LOG ("size : %d", flxh->size);
      GST_LOG ("frames : %d", flxh->frames);
      GST_LOG ("width : %d", flxh->width);
      GST_LOG ("height : %d", flxh->height);
      GST_LOG ("depth : %d", flxh->depth);
      GST_LOG ("speed : %d", flxh->speed);

      flxdec->next_time = 0;

      /* FLI speed is in 1/70s jiffies, FLC speed in milliseconds */
      if (flxh->type == FLX_MAGICHDR_FLI) {
        flxdec->frame_time = JIFFIE * flxh->speed;
      } else if (flxh->speed == 0) {
        flxdec->frame_time = GST_SECOND / 70;
      } else {
        flxdec->frame_time = flxh->speed * GST_MSECOND;
      }

      flxdec->duration = flxh->frames * flxdec->frame_time;
      GST_LOG ("duration : %" GST_TIME_FORMAT,
          GST_TIME_ARGS (flxdec->duration));

      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
      caps = gst_caps_copy (templ);
      gst_caps_unref (templ);
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, flxh->width,
          "height", G_TYPE_INT, flxh->height,
          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
          (gint) flxdec->frame_time / 1000, NULL);

      gst_pad_set_caps (flxdec->srcpad, caps);
      gst_caps_unref (caps);

      if (flxh->depth <= 8)
        flxdec->converter =
            flx_colorspace_converter_new (flxh->width, flxh->height);

      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
        GST_LOG ("(FLC) aspect_dx : %d", flxh->aspect_dx);
        GST_LOG ("(FLC) aspect_dy : %d", flxh->aspect_dy);
        GST_LOG ("(FLC) oframe1 : 0x%08x", flxh->oframe1);
        GST_LOG ("(FLC) oframe2 : 0x%08x", flxh->oframe2);
      }

      flxdec->size = (flxh->width * flxh->height);

      /* create delta and output frame */
      flxdec->frame_data = g_malloc (flxdec->size);
      flxdec->delta_data = g_malloc (flxdec->size);

      flxdec->state = GST_FLXDEC_PLAYING;
    }
  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
    GstBuffer *out;

    /* while we have enough data in the adapter */
    while (avail >= FlxFrameChunkSize && res == GST_FLOW_OK) {
      FlxFrameChunk flxfh;
      guchar *chunk;
      const guint8 *data;
      GstMapInfo map;

      chunk = NULL;
      data = gst_adapter_map (flxdec->adapter, FlxFrameChunkSize);
      memcpy (&flxfh, data, FlxFrameChunkSize);
      FLX_FRAME_CHUNK_FIX_ENDIANNESS (&flxfh);
      gst_adapter_unmap (flxdec->adapter);

      switch (flxfh.id) {
        case FLX_FRAME_TYPE:
          /* check if we have the complete frame */
          if (avail < flxfh.size)
            goto need_more_data;

          /* flush header */
          gst_adapter_flush (flxdec->adapter, FlxFrameChunkSize);

          chunk = gst_adapter_take (flxdec->adapter,
              flxfh.size - FlxFrameChunkSize);
          FLX_FRAME_TYPE_FIX_ENDIANNESS ((FlxFrameType *) chunk);
          if (((FlxFrameType *) chunk)->chunks == 0)
            break;

          /* create 32 bits output frame */
          out = gst_buffer_new_and_alloc (flxdec->size * 4);

          /* decode chunks */
          flx_decode_chunks (flxdec,
              ((FlxFrameType *) chunk)->chunks,
              chunk + FlxFrameTypeSize, flxdec->frame_data);

          /* save copy of the current frame for possible delta. */
          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);

          gst_buffer_map (out, &map, GST_MAP_WRITE);
          /* convert current frame. */
          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
              map.data);
          gst_buffer_unmap (out, &map);

          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
          flxdec->next_time += flxdec->frame_time;

          res = gst_pad_push (flxdec->srcpad, out);
          break;
        default:
          /* check if we have the complete frame */
          if (avail < flxfh.size)
            goto need_more_data;

          /* unknown chunk id: skip it entirely */
          gst_adapter_flush (flxdec->adapter, flxfh.size);
          break;
      }

      g_free (chunk);

      avail = gst_adapter_available (flxdec->adapter);
    }
  }
need_more_data:
  return res;

  /* ERRORS */
wrong_type:
  {
    GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
        ("not a flx file (type %x)", flxh->type));
    return GST_FLOW_ERROR;
  }
}
/* transform_caps for the Sobel element: for every input structure, extend
 * the "format" field with the grayscale formats the element can also
 * handle (GRAY8 plus native-endian GRAY16), drop fields that do not
 * survive the conversion, and intersect with the opposite pad's template
 * caps and the optional filter. */
static GstCaps *
gst_cv_sobel_transform_caps (GstBaseTransform * trans, GstPadDirection dir,
    GstCaps * caps, GstCaps * filter)
{
  GstCaps *to, *ret;
  GstCaps *templ;
  GstStructure *structure;
  GstPad *other;
  gint i;

  to = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    const GValue *v;
    GValue list = { 0, };
    GValue val = { 0, };

    /* copy: the input caps must not be modified */
    structure = gst_structure_copy (gst_caps_get_structure (caps, i));

    g_value_init (&list, GST_TYPE_LIST);

    g_value_init (&val, G_TYPE_STRING);
    g_value_set_string (&val, "GRAY8");
    gst_value_list_append_value (&list, &val);
    g_value_unset (&val);

    g_value_init (&val, G_TYPE_STRING);
    /* 16-bit gray in host byte order */
#if G_BYTE_ORDER == G_BIG_ENDIAN
    g_value_set_string (&val, "GRAY16_BE");
#else
    g_value_set_string (&val, "GRAY16_LE");
#endif
    gst_value_list_append_value (&list, &val);
    g_value_unset (&val);

    v = gst_structure_get_value (structure, "format");

    /* 'val' is unset (zeroed) here on purpose: gst_value_list_merge
     * initializes its destination */
    gst_value_list_merge (&val, v, &list);
    gst_structure_set_value (structure, "format", &val);
    g_value_unset (&val);
    g_value_unset (&list);

    /* these fields are format-specific and would make the merged format
     * list unsatisfiable */
    gst_structure_remove_field (structure, "colorimetry");
    gst_structure_remove_field (structure, "chroma-site");

    /* takes ownership of 'structure' */
    gst_caps_append_structure (to, structure);
  }

  /* filter against set allowed caps on the pad */
  other = (dir == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad;
  templ = gst_pad_get_pad_template_caps (other);
  ret = gst_caps_intersect (to, templ);
  gst_caps_unref (to);
  gst_caps_unref (templ);

  if (ret && filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = intersection;
  }

  return ret;
}
/* Compute the caps acceptable on the mask sink pad: intersect what the
 * video sink pad and src pad peers accept, rewrite the structures to the
 * grayscale formats (GRAY8 / native GRAY16) a mask must have, pin the
 * geometry to the negotiated video size, and finally intersect with the
 * mask peer's own caps. Note: the 'filter' parameter is currently unused
 * (kept for the getcaps signature).
 *
 * FIX: the early return for already-set current caps leaked the element
 * reference taken by gst_pad_get_parent(); and the template caps used in
 * the first intersection were leaked (gst_pad_get_pad_template_caps()
 * returns a new reference in 1.x). Both are released now. */
static GstCaps *
gst_shape_wipe_mask_sink_getcaps (GstPad * pad, GstCaps * filter)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstCaps *ret, *tmp;
  guint i, n;

  if (gst_pad_has_current_caps (pad)) {
    ret = gst_pad_get_current_caps (pad);
    gst_object_unref (self);
    return ret;
  }

  /* start from what the video sink peer accepts, limited to our template */
  tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
  if (tmp) {
    GstCaps *templ = gst_pad_get_pad_template_caps (self->video_sinkpad);

    ret = gst_caps_intersect (tmp, templ);
    gst_caps_unref (templ);
    gst_caps_unref (tmp);
  } else {
    ret = gst_pad_get_pad_template_caps (self->video_sinkpad);
  }

  GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  /* narrow by what downstream of the src pad accepts */
  tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
  GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (ret, tmp);
    gst_caps_unref (ret);
    gst_caps_unref (tmp);
    ret = intersection;
  }

  GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);

  if (gst_caps_is_empty (ret))
    goto done;

  /* rewrite every structure into the two mask formats: the original gets
   * native-endian GRAY16, a copy gets GRAY8; geometry is fixed to the
   * negotiated video size when known and framerate to 0/1 (still mask) */
  n = gst_caps_get_size (ret);
  tmp = gst_caps_new_empty ();
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (ret, i);
    GstStructure *t;

    gst_structure_set_name (s, "video/x-raw");
    gst_structure_remove_fields (s, "format", "framerate", NULL);

    if (self->vinfo.width && self->vinfo.height)
      gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width,
          "height", G_TYPE_INT, self->vinfo.height, NULL);

    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);

    t = gst_structure_copy (s);

    gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16), NULL);
    gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);

    gst_caps_append_structure (tmp, t);
  }
  gst_caps_append (ret, tmp);

  /* finally restrict by what the mask peer itself accepts */
  tmp = gst_pad_peer_query_caps (pad, NULL);
  GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);
  if (tmp) {
    GstCaps *intersection;

    intersection = gst_caps_intersect (tmp, ret);
    gst_caps_unref (tmp);
    gst_caps_unref (ret);
    ret = intersection;
  }

done:
  gst_object_unref (self);
  GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);

  return ret;
}
/* GstAudioEncoder::set_format vfunc: (re)configure the FFmpeg audio encoder
 * context from the negotiated GstAudioInfo, open the codec, pick an output
 * caps by intersecting downstream's allowed caps with what the codec can
 * produce, and configure the base class frame-size constraints.
 *
 * Returns FALSE (with the codec closed) on any failure.
 */
static gboolean
gst_ffmpegaudenc_set_format (GstAudioEncoder * encoder, GstAudioInfo * info)
{
  GstFFMpegAudEnc *ffmpegaudenc = (GstFFMpegAudEnc *) encoder;
  GstCaps *other_caps;
  GstCaps *allowed_caps;
  GstCaps *icaps;
  gsize frame_size;
  GstFFMpegAudEncClass *oclass =
      (GstFFMpegAudEncClass *) G_OBJECT_GET_CLASS (ffmpegaudenc);

  /* close old session */
  if (ffmpegaudenc->opened) {
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    ffmpegaudenc->opened = FALSE;
  }

  /* if we set it in _getcaps we should set it also in _link */
  ffmpegaudenc->context->strict_std_compliance = -1;

  /* user defined properties */
  if (ffmpegaudenc->bitrate > 0) {
    GST_INFO_OBJECT (ffmpegaudenc, "Setting avcontext to bitrate %d",
        ffmpegaudenc->bitrate);
    ffmpegaudenc->context->bit_rate = ffmpegaudenc->bitrate;
    ffmpegaudenc->context->bit_rate_tolerance = ffmpegaudenc->bitrate;
  } else {
    GST_INFO_OBJECT (ffmpegaudenc, "Using avcontext default bitrate %d",
        ffmpegaudenc->context->bit_rate);
  }

  /* RTP payload used for GOB production (for Asterisk) */
  if (ffmpegaudenc->rtp_payload_size) {
    ffmpegaudenc->context->rtp_payload_size = ffmpegaudenc->rtp_payload_size;
  }

  /* some other defaults */
  ffmpegaudenc->context->rc_strategy = 2;
  ffmpegaudenc->context->b_frame_strategy = 0;
  ffmpegaudenc->context->coder_type = 0;
  ffmpegaudenc->context->context_model = 0;
  ffmpegaudenc->context->scenechange_threshold = 0;
  ffmpegaudenc->context->inter_threshold = 0;

  /* fetch pix_fmt and so on */
  gst_ffmpeg_audioinfo_to_context (info, ffmpegaudenc->context);
  if (!ffmpegaudenc->context->time_base.den) {
    ffmpegaudenc->context->time_base.den = GST_AUDIO_INFO_RATE (info);
    ffmpegaudenc->context->time_base.num = 1;
    ffmpegaudenc->context->ticks_per_frame = 1;
  }

  /* remember FFmpeg's channel order so buffers can be reordered if it
   * differs from the negotiated GStreamer channel positions */
  if (ffmpegaudenc->context->channel_layout) {
    gst_ffmpeg_channel_layout_to_gst (ffmpegaudenc->context->channel_layout,
        ffmpegaudenc->context->channels, ffmpegaudenc->ffmpeg_layout);
    ffmpegaudenc->needs_reorder =
        (memcmp (ffmpegaudenc->ffmpeg_layout, info->position,
            sizeof (GstAudioChannelPosition) *
            ffmpegaudenc->context->channels) != 0);
  }

  /* open codec */
  if (gst_ffmpeg_avcodec_open (ffmpegaudenc->context, oclass->in_plugin) < 0) {
    if (ffmpegaudenc->context->priv_data)
      gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    GST_DEBUG_OBJECT (ffmpegaudenc, "avenc_%s: Failed to open FFMPEG codec",
        oclass->in_plugin->name);
    return FALSE;
  }

  /* some codecs support more than one format, first auto-choose one */
  GST_DEBUG_OBJECT (ffmpegaudenc, "picking an output format ...");
  allowed_caps = gst_pad_get_allowed_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps) {
    GST_DEBUG_OBJECT (ffmpegaudenc, "... but no peer, using template caps");
    /* in 1.0 gst_pad_get_pad_template_caps() also returns a reference,
     * so no copy is needed here */
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (encoder));
  }
  GST_DEBUG_OBJECT (ffmpegaudenc, "chose caps %" GST_PTR_FORMAT, allowed_caps);
  gst_ffmpeg_caps_with_codecid (oclass->in_plugin->id,
      oclass->in_plugin->type, allowed_caps, ffmpegaudenc->context);

  /* try to set this caps on the other side */
  other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
      ffmpegaudenc->context, TRUE);

  if (!other_caps) {
    gst_caps_unref (allowed_caps);
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    GST_DEBUG ("Unsupported codec - no caps found");
    return FALSE;
  }

  icaps = gst_caps_intersect (allowed_caps, other_caps);
  gst_caps_unref (allowed_caps);
  gst_caps_unref (other_caps);
  if (gst_caps_is_empty (icaps)) {
    gst_caps_unref (icaps);
    /* FIX: close the codec like every other post-open failure path does */
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    return FALSE;
  }
  icaps = gst_caps_truncate (icaps);

  if (!gst_audio_encoder_set_output_format (GST_AUDIO_ENCODER (ffmpegaudenc),
          icaps)) {
    gst_ffmpeg_avcodec_close (ffmpegaudenc->context);
    gst_caps_unref (icaps);
    return FALSE;
  }
  gst_caps_unref (icaps);

  /* tell the base class how many samples per frame the codec wants;
   * frame_size <= 1 means the codec accepts any number of samples */
  frame_size = ffmpegaudenc->context->frame_size;
  if (frame_size > 1) {
    gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (ffmpegaudenc),
        frame_size);
    gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (ffmpegaudenc),
        frame_size);
    gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (ffmpegaudenc), 1);
  } else {
    gst_audio_encoder_set_frame_samples_min (GST_AUDIO_ENCODER (ffmpegaudenc),
        0);
    gst_audio_encoder_set_frame_samples_max (GST_AUDIO_ENCODER (ffmpegaudenc),
        0);
    gst_audio_encoder_set_frame_max (GST_AUDIO_ENCODER (ffmpegaudenc), 0);
  }

  /* success! */
  ffmpegaudenc->opened = TRUE;

  return TRUE;
}
/* Sink pad setcaps handler: read geometry, framerate and pixel-aspect-ratio
 * from the negotiated input caps, choose an output colorspace acceptable
 * downstream, then (re)initialise the xvid decoder and negotiate the srcpad.
 */
static gboolean
gst_xviddec_setcaps (GstPad * pad, GstCaps * caps)
{
  GstXvidDec *dec = GST_XVIDDEC (GST_PAD_PARENT (pad));
  GstStructure *s;
  GstCaps *srccaps;
  const GValue *value;

  GST_LOG_OBJECT (dec, "caps %" GST_PTR_FORMAT, caps);

  /* if there's something old around, remove it */
  if (dec->handle)
    gst_xviddec_unset (dec);

  s = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (s, "width", &dec->width);
  gst_structure_get_int (s, "height", &dec->height);

  /* perhaps some fps info */
  value = gst_structure_get_value (s, "framerate");
  if (value != NULL && GST_VALUE_HOLDS_FRACTION (value)) {
    dec->fps_n = gst_value_get_fraction_numerator (value);
    dec->fps_d = gst_value_get_fraction_denominator (value);
  } else {
    dec->fps_n = -1;
    dec->fps_d = 1;
  }

  /* perhaps some par info */
  value = gst_structure_get_value (s, "pixel-aspect-ratio");
  if (value != NULL && GST_VALUE_HOLDS_FRACTION (value)) {
    dec->par_n = gst_value_get_fraction_numerator (value);
    dec->par_d = gst_value_get_fraction_denominator (value);
  } else {
    dec->par_n = 1;
    dec->par_d = 1;
  }

  /* we try to find the preferred/accept csp */
  srccaps = gst_pad_get_allowed_caps (dec->srcpad);
  if (srccaps == NULL) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref,
     * and get_pad_template_caps doesn't */
    srccaps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, srccaps);

  /* pick the first one ... */
  s = gst_caps_get_structure (srccaps, 0);
  value = gst_structure_get_value (s, "format");
  if (value != NULL && G_VALUE_TYPE (value) == GST_TYPE_LIST) {
    GValue first = { 0, };

    gst_value_init_and_copy (&first, gst_value_list_get_value (value, 0));
    gst_structure_set_value (s, "format", &first);
    g_value_unset (&first);
  }

  /* ... and use its info to get the csp */
  dec->csp = gst_xvid_structure_to_csp (s);
  if (dec->csp == -1) {
    GST_WARNING_OBJECT (dec, "failed to decide on colorspace, using I420");
    dec->csp = XVID_CSP_I420;
  }

  dec->outbuf_size =
      gst_xvid_image_get_size (dec->csp, dec->width, dec->height);

  GST_LOG_OBJECT (dec, "csp=%d, outbuf_size=%d", dec->csp, dec->outbuf_size);

  gst_caps_unref (srccaps);

  /* now set up xvid ... */
  if (!gst_xviddec_setup (dec)) {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), LIBRARY, INIT, (NULL), (NULL));
    return FALSE;
  }

  return gst_xviddec_negotiate (dec, NULL);
}
static void debug_dump_element_pad_link (GstPad * pad, GstElement * element, GstDebugGraphDetails details, GString * str, const gint indent) { GstElement *peer_element; GstPad *peer_pad; GstCaps *caps, *peer_caps; gchar *media = NULL; gchar *media_src = NULL, *media_sink = NULL; gchar *pad_name, *element_name; gchar *peer_pad_name, *peer_element_name; const gchar *spc = &spaces[MAX (sizeof (spaces) - (1 + indent * 2), 0)]; if ((peer_pad = gst_pad_get_peer (pad))) { if ((details & GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE) || (details & GST_DEBUG_GRAPH_SHOW_CAPS_DETAILS) ) { caps = gst_pad_get_current_caps (pad); if (!caps) caps = gst_pad_get_pad_template_caps (pad); peer_caps = gst_pad_get_current_caps (peer_pad); if (!peer_caps) peer_caps = gst_pad_get_pad_template_caps (peer_pad); media = debug_dump_describe_caps (caps, details); /* check if peer caps are different */ if (peer_caps && !gst_caps_is_equal (caps, peer_caps)) { gchar *tmp; tmp = debug_dump_describe_caps (peer_caps, details); if (gst_pad_get_direction (pad) == GST_PAD_SRC) { media_src = media; media_sink = tmp; } else { media_src = tmp; media_sink = media; } media = NULL; } gst_caps_unref (peer_caps); gst_caps_unref (caps); } pad_name = debug_dump_make_object_name (GST_OBJECT (pad)); if (element) { element_name = debug_dump_make_object_name (GST_OBJECT (element)); } else { element_name = g_strdup (""); } peer_pad_name = debug_dump_make_object_name (GST_OBJECT (peer_pad)); if ((peer_element = gst_pad_get_parent_element (peer_pad))) { peer_element_name = debug_dump_make_object_name (GST_OBJECT (peer_element)); } else { peer_element_name = g_strdup (""); } /* pad link */ if (media) { g_string_append_printf (str, "%s%s_%s -> %s_%s [label=\"%s\"]\n", spc, element_name, pad_name, peer_element_name, peer_pad_name, media); g_free (media); } else if (media_src && media_sink) { /* dot has some issues with placement of head and taillabels, * we need an empty label to make space */ g_string_append_printf (str, "%s%s_%s -> 
%s_%s [labeldistance=\"10\", labelangle=\"0\", " "label=\" \", " "taillabel=\"%s\", headlabel=\"%s\"]\n", spc, element_name, pad_name, peer_element_name, peer_pad_name, media_src, media_sink); g_free (media_src); g_free (media_sink); } else { g_string_append_printf (str, "%s%s_%s -> %s_%s\n", spc, element_name, pad_name, peer_element_name, peer_pad_name); } g_free (pad_name); g_free (element_name); g_free (peer_pad_name); g_free (peer_element_name); if (peer_element) gst_object_unref (peer_element); gst_object_unref (peer_pad); } }
/* we can only accept caps that we and downstream can handle.
 * if we have filtercaps set, use those to constrain the target caps.
 */
static GstCaps *
gst_audiomixer_sink_getcaps (GstAggregator * agg, GstPad * pad,
    GstCaps * filter)
{
  GstAudioAggregator *aagg = GST_AUDIO_AGGREGATOR (agg);
  GstAudioMixer *audiomixer = GST_AUDIO_MIXER (agg);
  GstCaps *caps, *downstream_caps, *sink_caps, *constraint;
  guint idx, len;

  /* combine the element's filter-caps property with the query's filter */
  GST_OBJECT_LOCK (audiomixer);
  constraint = audiomixer->filter_caps;
  if (constraint != NULL) {
    if (filter != NULL)
      constraint = gst_caps_intersect_full (filter, constraint,
          GST_CAPS_INTERSECT_FIRST);
    else
      gst_caps_ref (constraint);
  } else if (filter != NULL) {
    constraint = gst_caps_ref (filter);
  }
  GST_OBJECT_UNLOCK (audiomixer);

  if (constraint != NULL && gst_caps_is_empty (constraint)) {
    GST_WARNING_OBJECT (pad, "Empty filter caps");
    return constraint;
  }

  /* get the downstream possible caps */
  downstream_caps = gst_pad_peer_query_caps (agg->srcpad, constraint);

  /* get the allowed caps on this sinkpad */
  GST_OBJECT_LOCK (audiomixer);
  if (aagg->current_caps != NULL) {
    sink_caps = gst_caps_ref (aagg->current_caps);
  } else {
    sink_caps = gst_pad_get_pad_template_caps (pad);
    if (sink_caps == NULL)
      sink_caps = gst_caps_new_any ();
  }
  GST_OBJECT_UNLOCK (audiomixer);

  if (downstream_caps != NULL) {
    /* if the peer has caps, intersect */
    GST_DEBUG_OBJECT (audiomixer, "intersecting peer and our caps");
    caps = gst_caps_intersect_full (downstream_caps, sink_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (downstream_caps);
    gst_caps_unref (sink_caps);
  } else if (constraint != NULL) {
    /* the peer has no caps (or there is no peer), just use the allowed caps
     * of this sinkpad, restricted with filter-caps if any */
    GST_DEBUG_OBJECT (audiomixer, "no peer caps, using filtered caps");
    caps = gst_caps_intersect_full (constraint, sink_caps,
        GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (sink_caps);
  } else {
    GST_DEBUG_OBJECT (audiomixer, "no peer caps, using our caps");
    caps = sink_caps;
  }

  /* channel-mask carries no information when a structure is limited to
   * mono/stereo, so strip it from those structures */
  caps = gst_caps_make_writable (caps);
  len = gst_caps_get_size (caps);
  for (idx = 0; idx < len; idx++) {
    GstStructure *s = gst_caps_get_structure (caps, idx);
    GstStructure *probe = gst_structure_copy (s);

    gst_structure_set (probe, "channels", GST_TYPE_INT_RANGE, 0, 2, NULL);
    if (gst_structure_is_subset (s, probe)) {
      /* This field is irrelevant when in mono or stereo */
      gst_structure_remove_field (s, "channel-mask");
    }
    gst_structure_free (probe);
  }

  if (constraint != NULL)
    gst_caps_unref (constraint);

  GST_LOG_OBJECT (audiomixer, "getting caps on pad %p,%s to %" GST_PTR_FORMAT,
      pad, GST_PAD_NAME (pad), caps);

  return caps;
}
/* Negotiate the srcpad: intersect peer caps with our template, fixate to
 * 320x240@25/1, set the caps, and (re)configure a buffer pool for the
 * negotiated size. Returns FALSE if no common format exists.
 */
static gboolean
gst_monoscope_src_negotiate (GstMonoscope * monoscope)
{
  GstCaps *othercaps, *target;
  GstStructure *structure;
  GstCaps *templ;
  GstQuery *query;
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  templ = gst_pad_get_pad_template_caps (monoscope->srcpad);

  GST_DEBUG_OBJECT (monoscope, "performing negotiation");

  /* see what the peer can do */
  othercaps = gst_pad_peer_query_caps (monoscope->srcpad, NULL);
  if (othercaps) {
    target = gst_caps_intersect (othercaps, templ);
    gst_caps_unref (othercaps);
    gst_caps_unref (templ);

    if (gst_caps_is_empty (target))
      goto no_format;

    target = gst_caps_truncate (target);
  } else {
    /* no peer: negotiate against the template caps */
    target = templ;
  }

  target = gst_caps_make_writable (target);
  structure = gst_caps_get_structure (target, 0);
  gst_structure_fixate_field_nearest_int (structure, "width", 320);
  gst_structure_fixate_field_nearest_int (structure, "height", 240);
  gst_structure_fixate_field_nearest_fraction (structure, "framerate", 25, 1);

  gst_monoscope_src_setcaps (monoscope, target);

  /* try to get a bufferpool now */
  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (target, TRUE);

  if (!gst_pad_peer_query (monoscope->srcpad, query)) {
    /* not a problem, we use the query defaults below */
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    pool = NULL;
    size = monoscope->outsize;
    min = max = 0;
  }
  /* FIX: the allocation query was never released (leak);
   * parse_nth_allocation_pool gave us our own ref on 'pool' */
  gst_query_unref (query);

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_buffer_pool_new ();
  }

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, target, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (monoscope->pool) {
    /* FIX: the old pool must be deactivated (FALSE) before being dropped;
     * it was erroneously being activated here */
    gst_buffer_pool_set_active (monoscope->pool, FALSE);
    gst_object_unref (monoscope->pool);
  }
  monoscope->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_caps_unref (target);

  return TRUE;

no_format:
  {
    gst_caps_unref (target);
    return FALSE;
  }
}