static gboolean
gst_vaapidecode_ensure_allowed_sinkpad_caps (GstVaapiDecode * decode)
{
  GstCaps *caps, *allowed_sinkpad_caps;
  GArray *profiles;
  guint i;

  profiles =
      gst_vaapi_display_get_decode_profiles (GST_VAAPI_PLUGIN_BASE_DISPLAY
      (decode));
  if (!profiles)
    goto error_no_profiles;

  allowed_sinkpad_caps = gst_caps_new_empty ();
  if (!allowed_sinkpad_caps)
    goto error_no_memory;

  for (i = 0; i < profiles->len; i++) {
    const GstVaapiProfile profile =
        g_array_index (profiles, GstVaapiProfile, i);
    const gchar *media_type_name;
    const gchar *profile_name;
    GstStructure *structure;

    media_type_name = gst_vaapi_profile_get_media_type_name (profile);
    if (!media_type_name)
      continue;

    caps = gst_caps_from_string (media_type_name);
    if (!caps)
      continue;

    structure = gst_caps_get_structure (caps, 0);

    profile_name = gst_vaapi_profile_get_name (profile);
    if (profile_name)
      gst_structure_set (structure, "profile", G_TYPE_STRING, profile_name,
          NULL);

    allowed_sinkpad_caps = gst_caps_merge (allowed_sinkpad_caps, caps);
  }

  decode->allowed_sinkpad_caps = gst_caps_simplify (allowed_sinkpad_caps);

  g_array_unref (profiles);
  return TRUE;

  /* ERRORS */
error_no_profiles:
  {
    GST_ERROR ("failed to retrieve VA decode profiles");
    return FALSE;
  }
error_no_memory:
  {
    GST_ERROR ("failed to allocate allowed-caps set");
    g_array_unref (profiles);
    return FALSE;
  }
}
static GstCaps *
gst_alpha_color_transform_caps (GstBaseTransform * btrans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstCaps *tmpl_caps = NULL;
  GstCaps *result = NULL, *local_caps = NULL;
  guint i;

  local_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure =
        gst_structure_copy (gst_caps_get_structure (caps, i));

    /* Remove any specific parameter from the structure */
    gst_structure_remove_field (structure, "format");
    gst_structure_remove_field (structure, "colorimetry");
    gst_structure_remove_field (structure, "chroma-site");

    gst_structure_set_name (structure, "video/x-raw");
    gst_caps_append_structure (local_caps, structure);
  }

  /* Get the appropriate template */
  if (direction == GST_PAD_SINK) {
    tmpl_caps = gst_static_pad_template_get_caps (&src_template);
  } else if (direction == GST_PAD_SRC) {
    tmpl_caps = gst_static_pad_template_get_caps (&sink_template);
  }

  /* Intersect with our template caps */
  result = gst_caps_intersect (local_caps, tmpl_caps);
  gst_caps_unref (tmpl_caps);
  gst_caps_unref (local_caps);

  result = gst_caps_simplify (result);

  GST_LOG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
      caps, result);

  if (filter) {
    GstCaps *intersection;

    GST_DEBUG_OBJECT (btrans, "Using filter caps %" GST_PTR_FORMAT, filter);

    intersection =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersection;

    GST_DEBUG_OBJECT (btrans, "Intersection %" GST_PTR_FORMAT, result);
  }

  return result;
}
/**
 * cheese_camera_device_get_caps_for_format:
 * @device: a #CheeseCameraDevice
 * @format: a #CheeseVideoFormat
 *
 * Get the #GstCaps for the given @format on the @device.
 *
 * Returns: (transfer full): the #GstCaps for the given @format
 */
GstCaps *
cheese_camera_device_get_caps_for_format (CheeseCameraDevice *device,
                                          CheeseVideoFormat *format)
{
  CheeseCameraDevicePrivate *priv;
  CheeseVideoFormatFull *full_format;
  GstCaps *desired_caps;
  GstCaps *subset_caps;
  gsize i;

  g_return_val_if_fail (CHEESE_IS_CAMERA_DEVICE (device), NULL);

  full_format = cheese_camera_device_find_full_format (device, format);

  if (!full_format)
  {
    GST_INFO ("Getting caps for %dx%d: no such format!",
              format->width, format->height);
    return gst_caps_new_empty ();
  }

  GST_INFO ("Getting caps for %dx%d @ %d/%d fps",
            full_format->width, full_format->height,
            full_format->fr_numerator, full_format->fr_denominator);

  desired_caps = gst_caps_new_empty ();

  for (i = 0; supported_formats[i] != NULL; i++)
  {
    gst_caps_append (desired_caps,
                     cheese_camera_device_format_to_caps (supported_formats[i],
                                                          full_format));
  }

  priv = cheese_camera_device_get_instance_private (device);
  subset_caps = gst_caps_intersect (desired_caps, priv->caps);
  subset_caps = gst_caps_simplify (subset_caps);
  gst_caps_unref (desired_caps);

  GST_INFO ("Got %" GST_PTR_FORMAT, subset_caps);

  return subset_caps;
}
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModePlane * plane, drmModeRes * res)
{
  GstCaps *out_caps, *caps;
  int i;
  GstVideoFormat fmt;
  const gchar *format;

  if (self->allowed_caps)
    return TRUE;

  out_caps = gst_caps_new_empty ();
  if (!out_caps)
    return FALSE;

  for (i = 0; i < plane->count_formats; i++) {
    fmt = gst_video_format_from_drm (plane->formats[i]);
    if (fmt == GST_VIDEO_FORMAT_UNKNOWN) {
      GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (plane->formats[i]));
      continue;
    }

    format = gst_video_format_to_string (fmt);
    caps = gst_caps_new_simple ("video/x-raw",
        "format", G_TYPE_STRING, format,
        "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
        "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    if (!caps)
      continue;

    out_caps = gst_caps_merge (out_caps, caps);
  }

  self->allowed_caps = gst_caps_simplify (out_caps);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return TRUE;
}
/*
 * Get Caps
 *
 * As can be seen, this method violates the API between the GST element
 * and the Android device. Should be fixed... (FIXME)
 *
 */
static GstCaps *
gst_android_video_source_get_caps (GstBaseSrc * p_basesrc, GstCaps * p_filter)
{
  int i;
  int minFps;
  int maxFps;
  int fmtPos;
  int minWidth, minHeight;
  int maxWidth, maxHeight;
  GstCaps *caps;
  GstCaps *capsVec = NULL;

  GA_LOGTRACE ("ENTER %s --xx--> thread(%ld)", __FUNCTION__, pthread_self ());

  GstAndroidVideoSource *p_src = GST_ANDROIDVIDEOSOURCE (p_basesrc);

  if (GST_STATE (p_src) <= GST_STATE_NULL) {
    GA_LOGINFO ("%s: Called in state %s. Don't know device support yet. Will return NULL caps.",
        __FUNCTION__, gst_element_state_get_name (GST_STATE (p_src)));
    return NULL;
  }

  if (VCD_GetWidestFpsRange (p_src->m_devHandle, &minFps, &maxFps) != VCD_NO_ERROR) {
    return NULL;
  }

  if (VCD_NO_ERROR != VCD_GetMinResolution (p_src->m_devHandle, &minWidth, &minHeight)) {
    return NULL;
  }
  if (VCD_NO_ERROR != VCD_GetMaxResolution (p_src->m_devHandle, &maxWidth, &maxHeight)) {
    return NULL;
  }

  capsVec = gst_caps_new_empty ();
  for (fmtPos = 0; fmtPos < VCD_getMediaSupportFmtLen (p_src->m_devHandle); fmtPos++) {
    int fmt = VCD_getMediaSupportFmt (p_src->m_devHandle)[fmtPos];
    GstVideoFormat gstVideoFmt = vcd_int_to_gst_video_format (fmt);
    if (gstVideoFmt != GST_VIDEO_FORMAT_UNKNOWN) {
      caps = gst_caps_new_simple (
          "video/x-raw",
          "format", G_TYPE_STRING, gst_video_format_to_string (gstVideoFmt),
          "width", GST_TYPE_INT_RANGE, minWidth, maxWidth,
          "height", GST_TYPE_INT_RANGE, minHeight, maxHeight,
#ifdef ACCEPT_FPS_CAPS_DOWN_TO_1FPS
          "framerate", GST_TYPE_FRACTION_RANGE, 1000, ANDROID_FPS_DENOMINATOR,
          maxFps, ANDROID_FPS_DENOMINATOR,
#else
          "framerate", GST_TYPE_FRACTION_RANGE, minFps, ANDROID_FPS_DENOMINATOR,
          maxFps, ANDROID_FPS_DENOMINATOR,
#endif
          "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
          NULL);
      gst_caps_append (capsVec, caps);
    }
  }

  /* Some Android devices report one or more supported formats (or other stuff)
   * more than once, which gives caps duplicates. Those are removed by doing
   * gst_caps_simplify()... */
  capsVec = gst_caps_simplify (capsVec);

  GA_LOGINFO ("%s: CAPS supported by the Android video device:", __FUNCTION__);
  GA_LOGINFO ("%s:-----------------------------------------------------------", __FUNCTION__);
  for (i = 0; i < gst_caps_get_size (capsVec); i++) {
    /* The Android log cannot print messages that long, so take one caps at a time */
    GstCaps *capsCopy = gst_caps_copy_nth (capsVec, i);
    gchar *capsStr = gst_caps_to_string (capsCopy);
    GA_LOGINFO ("CAPS%d: %s", i + 1, capsStr);
    g_free (capsStr);
    gst_caps_unref (capsCopy);
  }
  GA_LOGINFO ("%s:-----------------------------------------------------------", __FUNCTION__);

  GA_LOGTRACE ("EXIT %s", __FUNCTION__);

  return capsVec;
}
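/* Illustration (not part of the element above): a minimal, self-contained
 * sketch of the deduplication the comment in the function above relies on.
 * gst_caps_append() adds structures unconditionally, so a format reported
 * twice by the device ends up twice in the caps; gst_caps_simplify() then
 * collapses the duplicate. The media type and format value below are
 * arbitrary examples, not values taken from any device. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  caps = gst_caps_new_empty ();
  /* the same structure appended twice, like a duplicated device format */
  gst_caps_append (caps, gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "NV21", NULL));
  gst_caps_append (caps, gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "NV21", NULL));

  /* gst_caps_simplify() takes ownership and returns caps with redundant
   * (duplicate or subset) structures removed: one NV21 entry remains */
  caps = gst_caps_simplify (caps);

  str = gst_caps_to_string (caps);
  g_print ("%s\n", str);
  g_free (str);
  gst_caps_unref (caps);

  return 0;
}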
/* Output buffer preparation ... if the buffer has no caps, and our allowed
 * output caps is fixed, then send the caps downstream, making sure caps are
 * sent before the segment event.
 *
 * This ensures that the caps event is sent if we can, so that pipelines like:
 *   gst-launch filesrc location=rawsamples.raw !
 *       audio/x-raw,format=S16LE,rate=48000,channels=2 ! alsasink
 * will work.
 */
static GstFlowReturn
gst_capsfilter_prepare_buf (GstBaseTransform * trans, GstBuffer * input,
    GstBuffer ** buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* always return the input as output buffer */
  *buf = input;

  if (!gst_pad_has_current_caps (trans->sinkpad)) {
    /* No caps. See if the output pad only supports fixed caps */
    GstCapsFilter *filter = GST_CAPSFILTER (trans);
    GstCaps *out_caps;
    GList *pending_events = filter->pending_events;

    GST_LOG_OBJECT (trans, "Input pad does not have caps");

    filter->pending_events = NULL;

    out_caps = gst_pad_get_current_caps (trans->srcpad);
    if (out_caps == NULL) {
      out_caps = gst_pad_get_allowed_caps (trans->srcpad);
      g_return_val_if_fail (out_caps != NULL, GST_FLOW_ERROR);
    }

    out_caps = gst_caps_simplify (out_caps);

    if (gst_caps_is_fixed (out_caps) && !gst_caps_is_empty (out_caps)) {
      GST_DEBUG_OBJECT (trans, "Have fixed output caps %"
          GST_PTR_FORMAT " to apply to srcpad", out_caps);

      if (!gst_pad_has_current_caps (trans->srcpad)) {
        if (gst_pad_set_caps (trans->srcpad, out_caps)) {
          if (pending_events) {
            GList *l;

            for (l = g_list_last (pending_events); l; l = l->prev) {
              GST_LOG_OBJECT (trans, "Forwarding %s event",
                  GST_EVENT_TYPE_NAME (l->data));
              GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans,
                  l->data);
            }
            g_list_free (pending_events);
            pending_events = NULL;
          }
        } else {
          ret = GST_FLOW_NOT_NEGOTIATED;
        }
      }

      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);
      gst_caps_unref (out_caps);
    } else {
      gchar *caps_str = gst_caps_to_string (out_caps);

      GST_DEBUG_OBJECT (trans, "Cannot choose caps. Have unfixed output caps %"
          GST_PTR_FORMAT, out_caps);
      gst_caps_unref (out_caps);

      GST_ELEMENT_ERROR (trans, STREAM, FORMAT,
          ("Filter caps do not completely specify the output format"),
          ("Output caps are unfixed: %s", caps_str));

      g_free (caps_str);
      g_list_free_full (pending_events, (GDestroyNotify) gst_event_unref);

      ret = GST_FLOW_ERROR;
    }
  }

  return ret;
}
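/* Illustration (not part of capsfilter): the branch above only pushes caps
 * downstream when gst_caps_is_fixed() holds. A standalone sketch of that
 * check, using the caps string from the example pipeline in the comment
 * above; the second caps string is an arbitrary unfixed counter-example. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *fixed_caps, *unfixed_caps;

  gst_init (&argc, &argv);

  /* every field has exactly one value, so capsfilter could set these on its
   * source pad before pushing raw buffers */
  fixed_caps =
      gst_caps_from_string ("audio/x-raw,format=S16LE,rate=48000,channels=2");

  /* the rate is a range, so these caps are not fixed and the element would
   * post a STREAM/FORMAT error instead */
  unfixed_caps =
      gst_caps_from_string ("audio/x-raw,format=S16LE,rate=(int)[ 8000, 48000 ]");

  g_print ("fixed: %d, unfixed: %d\n",
      gst_caps_is_fixed (fixed_caps), gst_caps_is_fixed (unfixed_caps));

  gst_caps_unref (fixed_caps);
  gst_caps_unref (unfixed_caps);
  return 0;
}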
static GstCaps *
gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * from, GstCaps * filter)
{
  GstCaps *result, *tmp_caps, *tmpl_caps = NULL;
  gint i, j;

  tmp_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (from); i++) {
    GstStructure *structure;
    const GValue *val, *lval;
    GValue list = { 0, };
    GValue aval = { 0, };
    const gchar *str;

    structure = gst_structure_copy (gst_caps_get_structure (from, i));

    /* we can transform I420 to AYUV, so we need to locate and substitute
     * AYUV for both of them */
    val = gst_structure_get_value (structure, "format");
    if (val && GST_VALUE_HOLDS_LIST (val)) {
      gboolean seen_ayuv = FALSE, seen_i420 = FALSE;

      g_value_init (&list, GST_TYPE_LIST);
      for (j = 0; j < gst_value_list_get_size (val); j++) {
        lval = gst_value_list_get_value (val, j);
        if ((str = g_value_get_string (lval))) {
          if (strcmp (str, "AYUV") == 0) {
            seen_ayuv = TRUE;
          } else if (strcmp (str, "I420") == 0) {
            seen_i420 = TRUE;
          }
        }
      }
      if (seen_ayuv && !seen_i420) {
        str = "I420";
      } else if (seen_i420 && !seen_ayuv) {
        str = "AYUV";
      } else
        str = NULL;
      if (str) {
        g_value_copy (val, &list);
        g_value_init (&aval, G_TYPE_STRING);
        g_value_set_string (&aval, str);
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        gst_structure_set_value (structure, "format", &list);
        g_value_unset (&list);
      }
    } else if (val && G_VALUE_HOLDS_STRING (val)) {
      if ((str = g_value_get_string (val)) &&
          ((strcmp (str, "AYUV") == 0) || (strcmp (str, "I420") == 0))) {
        g_value_init (&list, GST_TYPE_LIST);
        g_value_init (&aval, G_TYPE_STRING);
        g_value_set_string (&aval, "AYUV");
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        g_value_set_string (&aval, "I420");
        gst_value_list_append_value (&list, &aval);
        g_value_reset (&aval);
        gst_structure_set_value (structure, "format", &list);
        g_value_unset (&list);
      }
    } else {
      gst_structure_remove_field (structure, "format");
    }

    gst_structure_remove_field (structure, "colorimetry");
    gst_structure_remove_field (structure, "chroma-site");

    gst_caps_append_structure (tmp_caps, structure);
  }

  /* Get the appropriate template */
  if (direction == GST_PAD_SINK) {
    tmpl_caps =
        gst_static_pad_template_get_caps (&gst_smpte_alpha_src_template);
  } else if (direction == GST_PAD_SRC) {
    tmpl_caps =
        gst_static_pad_template_get_caps (&gst_smpte_alpha_sink_template);
  } else {
    g_assert_not_reached ();
  }

  /* Intersect with our template caps */
  result = gst_caps_intersect (tmp_caps, tmpl_caps);
  gst_caps_unref (tmpl_caps);
  gst_caps_unref (tmp_caps);

  result = gst_caps_simplify (result);

  GST_LOG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
      from, result);

  if (filter) {
    GstCaps *intersection;

    GST_DEBUG_OBJECT (trans, "Using filter caps %" GST_PTR_FORMAT, filter);
    intersection =
        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (result);
    result = intersection;
    GST_DEBUG_OBJECT (trans, "Intersection %" GST_PTR_FORMAT, result);
  }

  return result;
}
static gboolean
ensure_allowed_caps (GstKMSSink * self, drmModeConnector * conn,
    drmModePlane * plane, drmModeRes * res)
{
  GstCaps *out_caps, *tmp_caps, *caps;
  int i, j;
  GstVideoFormat fmt;
  const gchar *format;
  drmModeModeInfo *mode;
  gint count_modes;

  if (self->allowed_caps)
    return TRUE;

  out_caps = gst_caps_new_empty ();
  if (!out_caps)
    return FALSE;

  if (conn && self->modesetting_enabled)
    count_modes = conn->count_modes;
  else
    count_modes = 1;

  for (i = 0; i < count_modes; i++) {
    tmp_caps = gst_caps_new_empty ();
    if (!tmp_caps)
      return FALSE;

    mode = NULL;
    if (conn && self->modesetting_enabled)
      mode = &conn->modes[i];

    for (j = 0; j < plane->count_formats; j++) {
      fmt = gst_video_format_from_drm (plane->formats[j]);
      if (fmt == GST_VIDEO_FORMAT_UNKNOWN) {
        GST_INFO_OBJECT (self, "ignoring format %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (plane->formats[j]));
        continue;
      }

      format = gst_video_format_to_string (fmt);

      if (mode) {
        caps = gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, format,
            "width", G_TYPE_INT, mode->hdisplay,
            "height", G_TYPE_INT, mode->vdisplay,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      } else {
        caps = gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, format,
            "width", GST_TYPE_INT_RANGE, res->min_width, res->max_width,
            "height", GST_TYPE_INT_RANGE, res->min_height, res->max_height,
            "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
      }
      if (!caps)
        continue;

      tmp_caps = gst_caps_merge (tmp_caps, caps);
    }

    out_caps = gst_caps_merge (out_caps, gst_caps_simplify (tmp_caps));
  }

  self->allowed_caps = gst_caps_simplify (out_caps);

  GST_DEBUG_OBJECT (self, "allowed caps = %" GST_PTR_FORMAT,
      self->allowed_caps);

  return (self->allowed_caps && !gst_caps_is_empty (self->allowed_caps));
}
static gboolean
gst_vaapidecode_ensure_allowed_sinkpad_caps (GstVaapiDecode * decode)
{
  GstCaps *caps, *allowed_sinkpad_caps;
  GArray *profiles;
  guint i;
  gboolean base_only = FALSE;
  gboolean have_high = FALSE;
  gboolean have_mvc = FALSE;
  gboolean have_svc = FALSE;

  profiles =
      gst_vaapi_display_get_decode_profiles (GST_VAAPI_PLUGIN_BASE_DISPLAY
      (decode));
  if (!profiles)
    goto error_no_profiles;

  allowed_sinkpad_caps = gst_caps_new_empty ();
  if (!allowed_sinkpad_caps)
    goto error_no_memory;

  if (g_object_class_find_property (G_OBJECT_GET_CLASS (decode), "base-only")) {
    g_object_get (decode, "base-only", &base_only, NULL);
  }

  for (i = 0; i < profiles->len; i++) {
    const GstVaapiProfile profile =
        g_array_index (profiles, GstVaapiProfile, i);
    const gchar *media_type_name;
    const gchar *profile_name;
    GstStructure *structure;

    media_type_name = gst_vaapi_profile_get_media_type_name (profile);
    if (!media_type_name)
      continue;

    caps = gst_caps_from_string (media_type_name);
    if (!caps)
      continue;

    structure = gst_caps_get_structure (caps, 0);

    profile_name = gst_vaapi_profile_get_name (profile);
    if (profile_name)
      gst_structure_set (structure, "profile", G_TYPE_STRING, profile_name,
          NULL);

    allowed_sinkpad_caps = gst_caps_merge (allowed_sinkpad_caps, caps);

    have_mvc |= is_mvc_profile (profile);
    have_svc |= is_svc_profile (profile);
    have_high |= profile == GST_VAAPI_PROFILE_H264_HIGH;
  }

  if (have_high) {
    allowed_sinkpad_caps =
        add_h264_profile_in_caps (allowed_sinkpad_caps, "progressive-high");
    allowed_sinkpad_caps =
        add_h264_profile_in_caps (allowed_sinkpad_caps, "constrained-high");
  }

  if (base_only && (!have_mvc || !have_svc) && have_high) {
    if (!have_mvc) {
      GST_DEBUG ("base_only: force adding MVC profiles in caps");

      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "multiview-high");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "stereo-high");
    }

    if (!have_svc) {
      GST_DEBUG ("base_only: force adding SVC profiles in caps");

      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-constrained-baseline");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "scalable-baseline");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-high-intra");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps,
          "scalable-constrained-high");
      allowed_sinkpad_caps =
          add_h264_profile_in_caps (allowed_sinkpad_caps, "scalable-high");
    }
  }

  decode->allowed_sinkpad_caps = gst_caps_simplify (allowed_sinkpad_caps);

  g_array_unref (profiles);
  return TRUE;

  /* ERRORS */
error_no_profiles:
  {
    GST_ERROR ("failed to retrieve VA decode profiles");
    return FALSE;
  }
error_no_memory:
  {
    GST_ERROR ("failed to allocate allowed-caps set");
    g_array_unref (profiles);
    return FALSE;
  }
}
/* compare output with ffmpegcolorspace */
static void
colorspace_compare (gint width, gint height, gboolean comp)
{
  GstBus *bus;
  GstElement *pipeline, *src, *filter1, *filter2, *csp, *fcsp, *fakesink;
  GstElement *queue1, *queue2, *tee, *compare;
  GstCaps *caps, *tcaps, *rcaps, *fcaps;
  GstCaps *ccaps;
  GstPad *pad;
  gint i, j;

  /* create elements */
  pipeline = gst_pipeline_new ("pipeline");
  src = gst_element_factory_make ("videotestsrc", "videotestsrc");
  fail_unless (src != NULL);
  filter1 = gst_element_factory_make ("capsfilter", "capsfilter1");
  fail_unless (filter1 != NULL);
  csp = gst_element_factory_make ("colorspace", "colorspace");
  fail_unless (csp != NULL);
  filter2 = gst_element_factory_make ("capsfilter", "capsfilter2");
  fail_unless (filter2 != NULL);

  if (comp) {
    fcsp = gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace");
    fail_unless (fcsp != NULL);
    tee = gst_element_factory_make ("tee", "tee");
    fail_unless (tee != NULL);
    queue1 = gst_element_factory_make ("queue", "queue1");
    fail_unless (queue1 != NULL);
    queue2 = gst_element_factory_make ("queue", "queue2");
    fail_unless (queue2 != NULL);
    compare = gst_element_factory_make ("compare", "compare");
    fail_unless (compare != NULL);
  } else {
    fcsp = tee = queue1 = queue2 = compare = NULL;
  }

  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  fail_unless (fakesink != NULL);

  /* add and link */
  gst_bin_add_many (GST_BIN (pipeline), src, filter1, filter2, csp, fakesink,
      tee, queue1, queue2, fcsp, compare, NULL);
  fail_unless (gst_element_link (src, filter1));
  if (comp) {
    fail_unless (gst_element_link (filter1, tee));

    fail_unless (gst_element_link (tee, queue1));
    fail_unless (gst_element_link (queue1, fcsp));
    fail_unless (gst_element_link_pads (fcsp, NULL, compare, "sink"));

    fail_unless (gst_element_link (tee, queue2));
    fail_unless (gst_element_link (queue2, csp));
    fail_unless (gst_element_link_pads (csp, NULL, compare, "check"));

    fail_unless (gst_element_link (compare, filter2));
  } else {
    fail_unless (gst_element_link (filter1, csp));
    fail_unless (gst_element_link (csp, filter2));
  }
  fail_unless (gst_element_link (filter2, fakesink));

  /* obtain possible caps combinations */
  if (comp) {
    pad = gst_element_get_static_pad (fcsp, "sink");
    fail_unless (pad != NULL);
    ccaps = gst_pad_get_pad_template_caps (pad);
    fail_unless (ccaps != NULL);
    fcaps = ccaps;
    gst_object_unref (pad);
  } else {
    fcaps = gst_caps_new_any ();
  }

  pad = gst_element_get_static_pad (csp, "sink");
  fail_unless (pad != NULL);
  ccaps = gst_pad_get_pad_template_caps (pad);
  fail_unless (ccaps != NULL);
  gst_object_unref (pad);

  /* handle videotestsrc limitations */
  pad = gst_element_get_static_pad (src, "src");
  fail_unless (pad != NULL);
  caps = (GstCaps *) gst_pad_get_pad_template_caps (pad);
  fail_unless (caps != NULL);
  gst_object_unref (pad);

  rcaps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, 25, 1,
      "color-matrix", G_TYPE_STRING, "sdtv",
      "chroma-site", G_TYPE_STRING, "mpeg2", NULL);
  gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION, 25, 1,
          "depth", G_TYPE_INT, 32, NULL));
  /* FIXME also allow x-raw-gray if/when colorspace actually handles those */

  /* limit to supported compare types */
  if (comp) {
    gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb",
            "width", G_TYPE_INT, width,
            "height", G_TYPE_INT, height,
            "framerate", GST_TYPE_FRACTION, 25, 1,
            "depth", G_TYPE_INT, 24, NULL));
  }

  tcaps = gst_caps_intersect (fcaps, ccaps);
  gst_caps_unref (fcaps);
  gst_caps_unref (ccaps);
  caps = gst_caps_intersect (tcaps, caps);
  gst_caps_unref (tcaps);
  tcaps = caps;
  caps = gst_caps_intersect (tcaps, rcaps);
  gst_caps_unref (tcaps);
  gst_caps_unref (rcaps);

  /* normalize to finally have a list of acceptable fixed formats */
  caps = gst_caps_simplify (caps);
  caps = gst_caps_normalize (caps);

  /* set up for running stuff */
  loop = g_main_loop_new (NULL, FALSE);
  bus = gst_element_get_bus (pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::eos", (GCallback) message_cb, NULL);
  gst_object_unref (bus);

  g_object_set (src, "num-buffers", 5, NULL);
  if (comp) {
    /* set lower bound for ssim comparison, and allow slightly different caps */
    g_object_set (compare, "method", 2, NULL);
    g_object_set (compare, "meta", 3, NULL);
    g_object_set (compare, "threshold", 0.90, NULL);
    g_object_set (compare, "upper", FALSE, NULL);
  }

  GST_INFO ("possible caps to check %d", gst_caps_get_size (caps));

  /* loop over all input and output combinations */
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    for (j = 0; j < gst_caps_get_size (caps); j++) {
      GstCaps *in_caps, *out_caps;
      GstStructure *s;
      const gchar *fourcc;

      in_caps = gst_caps_copy_nth (caps, i);
      out_caps = gst_caps_copy_nth (caps, j);

      /* FIXME remove if videotestsrc and video format handle these properly */
      s = gst_caps_get_structure (in_caps, 0);
      if ((fourcc = gst_structure_get_string (s, "format"))) {
        if (!strcmp (fourcc, "YUV9") ||
            !strcmp (fourcc, "YVU9") || !strcmp (fourcc, "v216")) {
          gst_caps_unref (in_caps);
          gst_caps_unref (out_caps);
          continue;
        }
      }

      GST_INFO ("checking conversion from %" GST_PTR_FORMAT " (%d)"
          " to %" GST_PTR_FORMAT " (%d)", in_caps, i, out_caps, j);

      g_object_set (filter1, "caps", in_caps, NULL);
      g_object_set (filter2, "caps", out_caps, NULL);

      fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING)
          != GST_STATE_CHANGE_FAILURE);
      g_main_loop_run (loop);
      fail_unless (gst_element_set_state (pipeline, GST_STATE_NULL)
          == GST_STATE_CHANGE_SUCCESS);

      gst_caps_unref (in_caps);
      gst_caps_unref (out_caps);
    }
  }

  gst_caps_unref (caps);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
}
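/* Illustration (separate from the test above): a minimal sketch of what the
 * simplify/normalize step produces. gst_caps_normalize() expands every
 * list-valued field into its own structure, leaving one fixed structure per
 * combination, which is what the nested loops in the test iterate over. The
 * caps values here are arbitrary 1.0-style examples, not taken from the test. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstCaps *caps;
  guint i;

  gst_init (&argc, &argv);

  caps = gst_caps_from_string ("video/x-raw, format=(string){ I420, AYUV }, "
      "width=(int)320, height=(int)240, framerate=(fraction)25/1");

  caps = gst_caps_simplify (caps);
  caps = gst_caps_normalize (caps);

  /* two structures now: one with format=I420, one with format=AYUV */
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    gchar *s = gst_structure_to_string (gst_caps_get_structure (caps, i));
    g_print ("%u: %s\n", i, s);
    g_free (s);
  }

  gst_caps_unref (caps);
  return 0;
}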