/* Run check_filter_caps() for the element @name against every fixed variant
 * of the video caps template, at several test resolutions.
 *
 * @name:        element factory name to test
 * @event:       optional event to send for each run (may be NULL); this
 *               function takes ownership and unrefs it at the end
 * @num_buffers: number of buffers to push per run
 * @prop:        first property name for the property varargs
 * @varargs:     property name/value pairs forwarded to check_filter_caps();
 *               a fresh va_copy is made per run so each call consumes its
 *               own copy
 */
static void
check_filter_varargs (const gchar * name, GstEvent * event, gint num_buffers,
    const gchar * prop, va_list varargs)
{
  /* odd width/height values (385, 289) exercise non-even-dimension
   * handling alongside the regular 384x288 case */
  static const struct
  {
    const int width, height;
  } resolutions[] = { {384, 288}, {385, 289}, {385, 385} };
  gint i, n, r;
  gint size;
  GstCaps *allcaps, *templ = gst_caps_from_string (VIDEO_CAPS_TEMPLATE_STRING);

  /* normalize expands list fields into separate structures; it takes
   * ownership of templ */
  allcaps = gst_caps_normalize (templ);

  n = gst_caps_get_size (allcaps);

  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (allcaps, i);
    GstCaps *caps = gst_caps_new_empty ();

    gst_caps_append_structure (caps, gst_structure_copy (s));

    /* try various resolutions */
    for (r = 0; r < G_N_ELEMENTS (resolutions); ++r) {
      GstVideoInfo info;
      va_list args_cp;

      /* caps may have been made non-writable by the previous iteration's
       * consumers, so re-acquire writability before mutating */
      caps = gst_caps_make_writable (caps);
      gst_caps_set_simple (caps, "width", G_TYPE_INT, resolutions[r].width,
          "height", G_TYPE_INT, resolutions[r].height, "framerate",
          GST_TYPE_FRACTION, 25, 1, NULL);

      GST_DEBUG ("Testing with caps: %" GST_PTR_FORMAT, caps);
      gst_video_info_from_caps (&info, caps);
      size = GST_VIDEO_INFO_SIZE (&info);

      /* give check_filter_caps its own ref of the event; our own ref is
       * dropped once after the loops */
      if (event)
        gst_event_ref (event);
      /* each call consumes the va_list, so hand over a fresh copy */
      va_copy (args_cp, varargs);
      check_filter_caps (name, event, caps, size, num_buffers, prop, args_cp);
      va_end (args_cp);
    }
    gst_caps_unref (caps);
  }
  gst_caps_unref (allcaps);

  if (event)
    gst_event_unref (event);
}
int main (int argc, char *argv[]) { GstCaps *caps; gst_init (&argc, &argv); caps = gst_caps_normalize (gst_static_caps_get (&sinkcaps)); g_print ("\n%s\n", gst_caps_to_string (caps)); caps = gst_caps_normalize (gst_static_caps_get (&mp1parsecaps)); g_print ("\n%s\n", gst_caps_to_string (caps)); caps = gst_caps_normalize (gst_static_caps_get (&rawcaps)); g_print ("\n%s\n", gst_caps_to_string (caps)); caps = gst_caps_normalize (gst_static_caps_get (&rawcaps2)); g_print ("\n%s\n", gst_caps_to_string (caps)); caps = gst_caps_normalize (gst_static_caps_get (&rawcaps3)); g_print ("\n%s\n", gst_caps_to_string (caps)); caps = gst_caps_normalize (gst_static_caps_get (&rawcaps4)); g_assert (gst_caps_get_size (caps) == 8); g_print ("\n%s\n", gst_caps_to_string (caps)); return 0; }
/* Build a list of caps for which only width and height need to be set to
 * obtain fully fixed caps: each normalized template structure is taken,
 * its framerate pinned to 1/1, and width/height stripped.  Caller owns
 * the returned GList and the caps it contains. */
static GList *
video_crop_get_test_caps (GstElement * videocrop)
{
  GstCaps *template_caps, *normalized;
  GstPad *pad;
  GList *result = NULL;
  guint idx, num;

  pad = gst_element_get_static_pad (videocrop, "src");
  fail_unless (pad != NULL);
  template_caps = gst_pad_get_pad_template_caps (pad);
  fail_unless (template_caps != NULL);

  /* expand any list fields into one structure each */
  normalized = gst_caps_normalize (template_caps);

  num = gst_caps_get_size (normalized);
  for (idx = 0; idx < num; ++idx) {
    GstStructure *s;
    GstCaps *caps;

    s = gst_structure_copy (gst_caps_get_structure (normalized, idx));
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 1, 1, NULL);
    gst_structure_remove_field (s, "width");
    gst_structure_remove_field (s, "height");

    caps = gst_caps_new_empty ();
    gst_caps_append_structure (caps, s);

    GST_DEBUG ("have caps %" GST_PTR_FORMAT, caps);
    /* should be fixed without width/height */
    fail_unless (gst_caps_is_fixed (caps));

    result = g_list_prepend (result, caps);
  }

  gst_caps_unref (normalized);
  gst_object_unref (pad);

  return result;
}
/* GstAudioDecoder::set_format vfunc: negotiate the output audio format.
 *
 * Returns TRUE when a usable output format was found and stored as the
 * "next audioinfo"; FALSE when rate/channels are missing from input_caps
 * or no downstream-allowed format is accepted by mpg123. */
static gboolean
gst_mpg123_audio_dec_set_format (GstAudioDecoder * dec, GstCaps * input_caps)
{
  /* Using the parsed information upstream, and the list of allowed caps
   * downstream, this code tries to find a suitable audio info. It is important
   * to keep in mind that the rate and number of channels should never deviate
   * from the one the bitstream has, otherwise mpg123 has to mix channels and/or
   * resample (and as its docs say, its internal resampler is very crude). The
   * sample format, however, can be chosen freely, because the MPEG specs do not
   * mandate any special format. Therefore, rate and number of channels are taken
   * from upstream (which parsed the MPEG frames, so the input_caps contain
   * exactly the rate and number of channels the bitstream actually has), while
   * the sample format is chosen by trying out all caps that are allowed by
   * downstream. This way, the output is adjusted to what the downstream prefers.
   *
   * Also, the new output audio info is not set immediately. Instead, it is
   * considered the "next audioinfo". The code waits for mpg123 to notice the new
   * format (= when mpg123_decode_frame() returns MPG123_AUDIO_DEC_NEW_FORMAT),
   * and then sets the next audioinfo. Otherwise, the next audioinfo is set too
   * soon, which may cause problems with mp3s containing several format headers.
   * One example would be an mp3 with the first 30 seconds using 44.1 kHz, then
   * the next 30 seconds using 32 kHz. Rare, but possible.
   *
   * STEPS:
   *
   * 1. get rate and channels from input_caps
   * 2. get allowed caps from src pad
   * 3. for each structure in allowed caps:
   * 3.1. take format
   * 3.2. if the combination of format with rate and channels is unsupported by
   *      mpg123, go to (3), or exit with error if there are no more structures
   *      to try
   * 3.3. create next audioinfo out of rate,channels,format, and exit
   */

  int rate, channels;
  GstMpg123AudioDec *mpg123_decoder;
  GstCaps *allowed_srccaps;
  guint structure_nr;
  gboolean match_found = FALSE;

  mpg123_decoder = GST_MPG123_AUDIO_DEC (dec);

  g_assert (mpg123_decoder->handle != NULL);

  /* invalidate any previously pending audioinfo */
  mpg123_decoder->has_next_audioinfo = FALSE;

  /* Get rate and channels from input_caps */
  {
    GstStructure *structure;
    gboolean err = FALSE;

    /* Only the first structure is used (multiple
     * input caps structures don't make sense) */
    structure = gst_caps_get_structure (input_caps, 0);

    if (!gst_structure_get_int (structure, "rate", &rate)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a rate value");
    }
    if (!gst_structure_get_int (structure, "channels", &channels)) {
      err = TRUE;
      GST_ERROR_OBJECT (dec, "Input caps do not have a channel value");
    }

    if (err)
      return FALSE;
  }

  /* Get the caps that are allowed by downstream */
  {
    GstCaps *allowed_srccaps_unnorm =
        gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
    /* normalize so each structure carries a single fixed "format" string;
     * gst_caps_normalize() takes ownership of its argument */
    allowed_srccaps = gst_caps_normalize (allowed_srccaps_unnorm);
  }

  /* Go through all allowed caps, pick the first one that matches */
  for (structure_nr = 0; structure_nr < gst_caps_get_size (allowed_srccaps);
      ++structure_nr) {
    GstStructure *structure;
    gchar const *format_str;
    GstAudioFormat format;
    int encoding;

    structure = gst_caps_get_structure (allowed_srccaps, structure_nr);

    format_str = gst_structure_get_string (structure, "format");
    if (format_str == NULL) {
      GST_DEBUG_OBJECT (dec, "Could not get format from src caps");
      continue;
    }

    format = gst_audio_format_from_string (format_str);
    if (format == GST_AUDIO_FORMAT_UNKNOWN) {
      GST_DEBUG_OBJECT (dec, "Unknown format %s", format_str);
      continue;
    }

    /* map the GStreamer sample format to the mpg123 encoding constant */
    switch (format) {
      case GST_AUDIO_FORMAT_S16:
        encoding = MPG123_ENC_SIGNED_16;
        break;
      case GST_AUDIO_FORMAT_S24:
        encoding = MPG123_ENC_SIGNED_24;
        break;
      case GST_AUDIO_FORMAT_S32:
        encoding = MPG123_ENC_SIGNED_32;
        break;
      case GST_AUDIO_FORMAT_U16:
        encoding = MPG123_ENC_UNSIGNED_16;
        break;
      case GST_AUDIO_FORMAT_U24:
        encoding = MPG123_ENC_UNSIGNED_24;
        break;
      case GST_AUDIO_FORMAT_U32:
        encoding = MPG123_ENC_UNSIGNED_32;
        break;
      case GST_AUDIO_FORMAT_F32:
        encoding = MPG123_ENC_FLOAT_32;
        break;
      default:
        GST_DEBUG_OBJECT (dec,
            "Format %s in srccaps is not supported", format_str);
        continue;
    }

    {
      int err;

      /* Cleanup old formats & set new one */
      mpg123_format_none (mpg123_decoder->handle);
      err = mpg123_format (mpg123_decoder->handle, rate, channels, encoding);
      if (err != MPG123_OK) {
        /* this rate/channels/encoding combination is rejected by mpg123;
         * keep trying the remaining downstream-allowed formats */
        GST_DEBUG_OBJECT (dec,
            "mpg123 cannot use caps %" GST_PTR_FORMAT
            " because mpg123_format() failed: %s", structure,
            mpg123_strerror (mpg123_decoder->handle));
        continue;
      }
    }

    /* record the negotiated format; it is applied later, once mpg123
     * reports the new format during decoding (see comment above) */
    gst_audio_info_init (&(mpg123_decoder->next_audioinfo));
    gst_audio_info_set_format (&(mpg123_decoder->next_audioinfo), format,
        rate, channels, NULL);
    GST_LOG_OBJECT (dec, "The next audio format is: %s, %u Hz, %u channels",
        format_str, rate, channels);
    mpg123_decoder->has_next_audioinfo = TRUE;

    match_found = TRUE;

    break;
  }

  gst_caps_unref (allowed_srccaps);

  return match_found;
}
/* Negotiate output video format for the decoded jasper @image.
 *
 * Validates the image (3 components, 8-bit unsigned, known colour space),
 * detects whether the geometry/colour space changed since the last image,
 * and if so walks the downstream-allowed caps to find the first video
 * format whose per-component geometry matches the image.  On success the
 * cached format properties (offsets, strides, size) are refreshed and the
 * new caps are set on the src pad.
 *
 * Returns GST_FLOW_OK, GST_FLOW_NOT_NEGOTIATED (bad image or no caps
 * agreement) or GST_FLOW_ERROR (unsupported colour space). */
static GstFlowReturn
gst_jasper_dec_negotiate (GstJasperDec * dec, jas_image_t * image)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  gint width, height, channels;
  gint i, j;
  gboolean negotiate = FALSE;
  jas_clrspc_t clrspc;
  GstCaps *allowed_caps, *caps;

  width = jas_image_width (image);
  height = jas_image_height (image);
  channels = jas_image_numcmpts (image);

  GST_LOG_OBJECT (dec, "%d x %d, %d components", width, height, channels);

  /* jp2c bitstream has no real colour space info (kept in container),
   * so decoder may only pretend to know, where it really does not */
  if (!jas_clrspc_isunknown (dec->clrspc)) {
    clrspc = dec->clrspc;
    GST_DEBUG_OBJECT (dec, "forcing container supplied colour space %d",
        clrspc);
    jas_image_setclrspc (image, clrspc);
  } else
    clrspc = jas_image_clrspc (image);

  if (!width || !height || !channels || jas_clrspc_isunknown (clrspc))
    goto fail_image;

  /* any change in geometry, component count or colour space forces
   * renegotiation */
  if (dec->width != width || dec->height != height ||
      dec->channels != channels || dec->clrspc != clrspc)
    negotiate = TRUE;

  if (channels != 3)
    goto not_supported;

  for (i = 0; i < channels; i++) {
    gint cheight, cwidth, depth, sgnd;

    cheight = jas_image_cmptheight (image, i);
    cwidth = jas_image_cmptwidth (image, i);
    depth = jas_image_cmptprec (image, i);
    sgnd = jas_image_cmptsgnd (image, i);

    GST_LOG_OBJECT (dec, "image component %d, %dx%d, depth %d, sgnd %d", i,
        cwidth, cheight, depth, sgnd);

    /* only 8-bit unsigned components are handled */
    if (depth != 8 || sgnd)
      goto not_supported;

    if (dec->cheight[i] != cheight || dec->cwidth[i] != cwidth) {
      dec->cheight[i] = cheight;
      dec->cwidth[i] = cwidth;
      negotiate = TRUE;
    }
  }

  /* nothing changed and we already have a valid format: done */
  if (!negotiate && dec->format != GST_VIDEO_FORMAT_UNKNOWN)
    goto done;

  /* clear and refresh to new state */
  flow_ret = GST_FLOW_NOT_NEGOTIATED;
  dec->format = GST_VIDEO_FORMAT_UNKNOWN;
  dec->width = width;
  dec->height = height;
  dec->channels = channels;

  /* retrieve allowed caps, and find the first one that reasonably maps
   * to the parameters of the colourspace */
  caps = gst_pad_get_allowed_caps (dec->srcpad);
  if (!caps) {
    GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
    /* need to copy because get_allowed_caps returns a ref,
     * and get_pad_template_caps doesn't */
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
  }

  /* avoid lists of fourcc, etc */
  allowed_caps = gst_caps_normalize (caps);
  caps = NULL;
  GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);

  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
    GstVideoFormat format;
    gboolean ok;

    /* drop the previous candidate before trying the next one */
    if (caps)
      gst_caps_unref (caps);
    caps = gst_caps_copy_nth (allowed_caps, i);
    /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
    gst_pad_fixate_caps (dec->srcpad, caps);
    GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);

    if (!gst_video_format_parse_caps (caps, &format, NULL, NULL))
      continue;
    /* map jasper component indices (dec->cmpt[]) to the candidate
     * format's colour family; skip candidates in the wrong family or
     * with missing components */
    if (gst_video_format_is_rgb (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_RGB) {
      GST_DEBUG_OBJECT (dec, "trying RGB");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_R))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_G))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_RGB_B))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing RGB color component");
        continue;
      }
    } else if (gst_video_format_is_yuv (format) &&
        jas_clrspc_fam (clrspc) == JAS_CLRSPC_FAM_YCBCR) {
      GST_DEBUG_OBJECT (dec, "trying YUV");
      if ((dec->cmpt[0] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_Y))) < 0 ||
          (dec->cmpt[1] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CB))) < 0 ||
          (dec->cmpt[2] = jas_image_getcmptbytype (image,
                  JAS_IMAGE_CT_COLOR (JAS_CLRSPC_CHANIND_YCBCR_CR))) < 0) {
        GST_DEBUG_OBJECT (dec, "missing YUV color component");
        continue;
      }
    } else
      continue;
    /* match format with validity checks */
    ok = TRUE;
    for (j = 0; j < channels; j++) {
      gint cmpt;

      cmpt = dec->cmpt[j];
      if (dec->cwidth[cmpt] != gst_video_format_get_component_width (format,
              j, width) ||
          dec->cheight[cmpt] != gst_video_format_get_component_height (format,
              j, height))
        ok = FALSE;
    }
    /* commit to this format */
    if (ok) {
      dec->format = format;
      break;
    }
  }

  if (caps)
    gst_caps_unref (caps);
  gst_caps_unref (allowed_caps);

  if (dec->format != GST_VIDEO_FORMAT_UNKNOWN) {
    /* cache some video format properties */
    for (j = 0; j < channels; ++j) {
      dec->offset[j] = gst_video_format_get_component_offset (dec->format, j,
          dec->width, dec->height);
      dec->inc[j] = gst_video_format_get_pixel_stride (dec->format, j);
      dec->stride[j] = gst_video_format_get_row_stride (dec->format, j,
          dec->width);
    }
    dec->image_size = gst_video_format_get_size (dec->format, width, height);
    dec->alpha = gst_video_format_has_alpha (dec->format);

    /* scratch row buffer, sized to the new width */
    if (dec->buf)
      g_free (dec->buf);
    dec->buf = g_new0 (glong, dec->width);

    caps = gst_video_format_new_caps (dec->format, dec->width, dec->height,
        dec->framerate_numerator, dec->framerate_denominator, 1, 1);

    GST_DEBUG_OBJECT (dec, "Set format to %d, size to %dx%d", dec->format,
        dec->width, dec->height);

    if (!gst_pad_set_caps (dec->srcpad, caps))
      flow_ret = GST_FLOW_NOT_NEGOTIATED;
    else
      flow_ret = GST_FLOW_OK;

    gst_caps_unref (caps);
  }

done:
  return flow_ret;

  /* ERRORS */
fail_image:
  {
    GST_DEBUG_OBJECT (dec, "Failed to process decoded image.");
    flow_ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
not_supported:
  {
    GST_DEBUG_OBJECT (dec, "Decoded image has unsupported colour space.");
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Unsupported colorspace"));
    flow_ret = GST_FLOW_ERROR;
    goto done;
  }
}
/* Return a normalized copy of these caps: same set of formats, but with
 * every list field expanded into its own structure. */
CapsPtr Caps::getNormal() const
{
    GstCaps *normalized = gst_caps_normalize(object<GstCaps>());
    return CapsPtr::wrap(normalized, false);
}
/* compare output with ffmpegcolorspace
 *
 * Builds a pipeline that pushes 5 videotestsrc buffers of @width x @height
 * through the colorspace element for every input/output caps combination
 * the involved elements can agree on.  When @comp is TRUE, a parallel
 * branch through ffmpegcolorspace feeds a "compare" element so the two
 * converters' outputs are checked against each other (SSIM, threshold
 * 0.90); when FALSE, colorspace output simply goes to fakesink.
 *
 * NOTE(review): relies on a file-scope GMainLoop `loop` and a message_cb
 * EOS handler defined elsewhere in this file. */
static void
colorspace_compare (gint width, gint height, gboolean comp)
{
  GstBus *bus;
  GstElement *pipeline, *src, *filter1, *filter2, *csp, *fcsp, *fakesink;
  GstElement *queue1, *queue2, *tee, *compare;
  GstCaps *caps, *tcaps, *rcaps, *fcaps;
  GstCaps *ccaps;
  GstPad *pad;
  gint i, j;

  /* create elements */
  pipeline = gst_pipeline_new ("pipeline");
  src = gst_element_factory_make ("videotestsrc", "videotestsrc");
  fail_unless (src != NULL);
  filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
  fail_unless (filter1 != NULL);
  csp = gst_element_factory_make ("colorspace", "colorspace");
  fail_unless (csp != NULL);
  filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  fail_unless (filter2 != NULL);

  if (comp) {
    /* comparison branch: tee -> queue1 -> ffmpegcolorspace -> compare,
     *                    tee -> queue2 -> colorspace       -> compare */
    fcsp = gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace");
    fail_unless (fcsp != NULL);
    tee = gst_element_factory_make ("tee", "tee");
    fail_unless (tee != NULL);
    queue1 = gst_element_factory_make ("queue", "queue1");
    fail_unless (queue1 != NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    fail_unless (queue2 != NULL);
    compare = gst_element_factory_make ("compare", "compare");
    fail_unless (compare != NULL);
  } else {
    fcsp = tee = queue1 = queue2 = compare = NULL;
  }

  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (fakesink != NULL);

  /* add and link */
  /* when comp is FALSE, tee is NULL and acts as the NULL sentinel, so the
   * comparison elements after it are (intentionally) not added */
  gst_bin_add_many (GST_BIN (pipeline), src, filter1, filter2, csp, fakesink,
      tee, queue1, queue2, fcsp, compare, NULL);
  fail_unless (gst_element_link (src, filter1));
  if (comp) {
    fail_unless (gst_element_link (filter1, tee));
    fail_unless (gst_element_link (tee, queue1));
    fail_unless (gst_element_link (queue1, fcsp));
    fail_unless (gst_element_link_pads (fcsp, NULL, compare, "sink"));
    fail_unless (gst_element_link (tee, queue2));
    fail_unless (gst_element_link (queue2, csp));
    fail_unless (gst_element_link_pads (csp, NULL, compare, "check"));
    fail_unless (gst_element_link (compare, filter2));
  } else {
    fail_unless (gst_element_link (filter1, csp));
    fail_unless (gst_element_link (csp, filter2));
  }
  fail_unless (gst_element_link (filter2, fakesink));

  /* obtain possible caps combinations */
  if (comp) {
    pad = gst_element_get_static_pad (fcsp, "sink");
    fail_unless (pad != NULL);
    ccaps = gst_pad_get_pad_template_caps (pad);
    fail_unless (ccaps != NULL);
    fcaps = ccaps;
    gst_object_unref (pad);
  } else {
    fcaps = gst_caps_new_any ();
  }

  pad = gst_element_get_static_pad (csp, "sink");
  fail_unless (pad != NULL);
  ccaps = gst_pad_get_pad_template_caps (pad);
  fail_unless (ccaps != NULL);
  gst_object_unref (pad);

  /* handle videotestsrc limitations */
  pad = gst_element_get_static_pad (src, "src");
  fail_unless (pad != NULL);
  caps = (GstCaps *) gst_pad_get_pad_template_caps (pad);
  fail_unless (caps != NULL);
  gst_object_unref (pad);

  /* restrict to the requested size/framerate, and to formats both
   * converters can sensibly produce */
  rcaps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, 25, 1,
      "color-matrix", G_TYPE_STRING, "sdtv",
      "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
  gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
          "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION, 25, 1,
          "depth", G_TYPE_INT, 32, NULL));
  /* FIXME also allow x-raw-gray if/when colorspace actually handles those */

  /* limit to supported compare types */
  if (comp) {
    gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 25, 1,
            "depth", G_TYPE_INT, 24, NULL));
  }

  /* intersect everything: ffmpegcolorspace caps, colorspace caps,
   * videotestsrc caps, and the restriction caps above */
  tcaps = gst_caps_intersect (fcaps, ccaps);
  gst_caps_unref (fcaps);
  gst_caps_unref (ccaps);
  caps = gst_caps_intersect (tcaps, caps);
  gst_caps_unref (tcaps);
  tcaps = caps;
  caps = gst_caps_intersect (tcaps, rcaps);
  gst_caps_unref (tcaps);
  gst_caps_unref (rcaps);

  /* normalize to finally have a list of acceptable fixed formats */
  caps = gst_caps_simplify (caps);
  caps = gst_caps_normalize (caps);

  /* set up for running stuff */
  loop = g_main_loop_new (NULL, FALSE);
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::eos", (GCallback) message_cb, NULL);
  gst_object_unref (bus);

  g_object_set (src, "num-buffers", 5, NULL);
  if (comp) {
    /* set lower bound for ssim comparison, and allow slightly different caps */
    g_object_set (compare, "method", 2, NULL);
    g_object_set (compare, "meta", 3, NULL);
    g_object_set (compare, "threshold", 0.90, NULL);
    g_object_set (compare, "upper", FALSE, NULL);
  }

  GST_INFO ("possible caps to check %d", gst_caps_get_size (caps));

  /* loop over all input and output combinations */
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    for (j = 0; j < gst_caps_get_size (caps); j++) {
      GstCaps *in_caps, *out_caps;
      GstStructure *s;
      const gchar *fourcc;

      in_caps = gst_caps_copy_nth (caps, i);
      out_caps = gst_caps_copy_nth (caps, j);

      /* FIXME remove if videotestsrc and video format handle these properly */
      s = gst_caps_get_structure (in_caps, 0);
      if ((fourcc = gst_structure_get_string (s, "format"))) {
        if (!strcmp (fourcc, "YUV9") ||
            !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
          gst_caps_unref (in_caps);
          gst_caps_unref (out_caps);
          continue;
        }
      }

      GST_INFO ("checking conversion from %" GST_PTR_FORMAT " (%d)"
          " to %" GST_PTR_FORMAT " (%d)", in_caps, i, out_caps, j);

      g_object_set (filter1, "caps", in_caps, NULL);
      g_object_set (filter2, "caps", out_caps, NULL);

      /* run until the EOS handler quits the main loop */
      fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING)
          != GST_STATE_CHANGE_FAILURE);

      g_main_loop_run (loop);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_NULL)
          == GST_STATE_CHANGE_SUCCESS);

      gst_caps_unref (in_caps);
      gst_caps_unref (out_caps);
    }
  }

  gst_caps_unref (caps);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
/*
 * Method: normalize
 *
 * Creates a new Gst::Caps representing the same set of formats as self,
 * but with no lists: each list field is expanded into separate
 * structures (as Hash objects).
 *
 * Returns: a new Gst::Caps object.
 */
static VALUE
rg_normalize (VALUE self)
{
    GstCaps *expanded;

    expanded = gst_caps_normalize (RGST_CAPS (self));
    return RGST_CAPS_NEW (expanded);
}