/* Sink-pad setcaps handler: resets the decoder's GstVideoState from the new
 * caps (dimensions, framerate, PAR, interlacing, codec_data) and invokes the
 * subclass start() vfunc if one is provided.
 *
 * Returns TRUE on success (or if the subclass has no start() vfunc),
 * otherwise whatever start() returned. */
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  /* Release our reference on the previous codec_data before the whole
   * state is wiped below. */
  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  /* Best-effort parsing: missing fields simply leave the zeroed defaults. */
  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    /* BUGFIX: gst_value_get_buffer() returns a borrowed buffer (no ref is
     * transferred).  We keep it in the state and unref it on the next
     * setcaps, so we must take our own reference here — otherwise that
     * unref is unbalanced and can free a buffer still owned by the caps. */
    state->codec_data = gst_buffer_ref (gst_value_get_buffer (codec_data));
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  /* Drop the ref taken by gst_pad_get_parent(). */
  g_object_unref (base_video_decoder);

  return ret;
}
/* Translates GStreamer video caps (plus optional per-buffer field-order
 * flags) into an AviSynth AVS_VideoInfo description.
 *
 * buf may be NULL; it is only consulted for the TFF flag when the stream is
 * interlaced.  On success fills vi (pixel type, image type, fps, size and
 * frame count derived from the pad's duration) and returns TRUE; on any
 * parse failure or unsupported format logs an error and returns FALSE
 * without running the duration query. */
gboolean AVSC_CC
gst_avsynth_buf_pad_caps_to_vi (GstBuffer *buf, GstPad *pad, GstCaps *caps,
    AVS_VideoInfo *vi)
{
  gboolean ret = TRUE;
  GstVideoFormat vf;
  gint fps_num = 0, fps_den = 0;
  gint width = 0, height = 0;
  gboolean interlaced;
  gint64 duration = -1;

  ret = gst_video_format_parse_caps (caps, &vf, &width, &height);
  if (!ret) {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get format/width/height");
    goto cleanup;
  }

  ret = gst_video_format_parse_caps_interlaced (caps, &interlaced);
  if (!ret) {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get interlaced state");
    goto cleanup;
  }

  ret = gst_video_parse_caps_framerate (caps, &fps_num, &fps_den);
  if (!ret) {
    GST_ERROR ("Failed to convert caps to videoinfo - can't get fps");
    goto cleanup;
  }

  switch (vf) {
    case GST_VIDEO_FORMAT_I420:
      vi->pixel_type = AVS_CS_I420;
      break;
    case GST_VIDEO_FORMAT_YUY2:
      vi->pixel_type = AVS_CS_YUY2;
      break;
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
      vi->pixel_type = AVS_CS_BGR32;
      break;
    case GST_VIDEO_FORMAT_BGR:
      vi->pixel_type = AVS_CS_BGR24;
      break;
    case GST_VIDEO_FORMAT_YV12:
      vi->pixel_type = AVS_CS_YV12;
      break;
    default:
      /* BUGFIX: previously this only set ret = FALSE and fell through,
       * still populating vi and running the duration query with an
       * uninitialized pixel_type.  Bail out like the other error paths. */
      GST_ERROR ("Failed to convert caps to videoinfo - unsupported video format");
      ret = FALSE;
      goto cleanup;
  }

  if (interlaced) {
    vi->image_type = AVS_IT_FIELDBASED;
    if (buf) {
      if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_TFF))
        vi->image_type |= AVS_IT_TFF;
      else
        /* Apparently, GStreamer doesn't know what "unknown field order" is.
         * If you get wrong field order - file a bugreport against the source
         * element (or maybe a decoder?). Field order should be known.
         */
        vi->image_type |= AVS_IT_BFF;
    }
  }

  vi->fps_numerator = fps_num;
  vi->fps_denominator = fps_den;
  vi->width = width;
  vi->height = height;

  duration = gst_avsynth_query_duration (pad, vi);
  vi->num_frames = duration;

cleanup:
  return ret;
}
/* Setcaps handler shared by both pads of GstInterlace: derives the caps for
 * the opposite pad (toggling the "interlaced" field and scaling the framerate
 * by the selected pulldown pattern's ratio), pushes them, and caches the
 * negotiated format/size/fps on the element.
 *
 * Returns TRUE if parsing and downstream/upstream negotiation succeeded. */
static gboolean
gst_interlace_setcaps (GstPad * pad, GstCaps * caps)
{
  GstInterlace *interlace;
  gboolean ret;
  int width, height;
  GstVideoFormat format;
  gboolean interlaced = TRUE;
  int fps_n, fps_d;
  GstPad *otherpad;
  GstCaps *othercaps;
  const PulldownFormat *pdformat;

  interlace = GST_INTERLACE (gst_pad_get_parent (pad));

  otherpad =
      (pad == interlace->srcpad) ? interlace->sinkpad : interlace->srcpad;

  ret = gst_video_format_parse_caps (caps, &format, &width, &height);
  /* "interlaced" is optional; keep the TRUE default if absent. */
  gst_video_format_parse_caps_interlaced (caps, &interlaced);
  ret &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d);
  if (!ret)
    goto error;

  othercaps = gst_caps_copy (caps);
  pdformat = &formats[interlace->pattern];

  if (pad == interlace->srcpad) {
    /* Going upstream: the sink side is progressive at the inverse ratio. */
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, FALSE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_d, fps_d * pdformat->ratio_n, NULL);
  } else {
    /* Going downstream: the src side is interlaced at the pattern ratio. */
    gst_caps_set_simple (othercaps, "interlaced", G_TYPE_BOOLEAN, TRUE, NULL);
    gst_caps_set_simple (othercaps, "framerate", GST_TYPE_FRACTION,
        fps_n * pdformat->ratio_n, fps_d * pdformat->ratio_d, NULL);
  }

  ret = gst_pad_set_caps (otherpad, othercaps);
  if (!ret) {
    /* BUGFIX: othercaps was leaked on this path. */
    gst_caps_unref (othercaps);
    goto error;
  }

  interlace->format = format;
  interlace->width = width;
  interlace->height = height;

  interlace->phase_index = interlace->pattern_offset;

  if (pad == interlace->sinkpad) {
    gst_caps_replace (&interlace->srccaps, othercaps);
    interlace->src_fps_n = fps_n * pdformat->ratio_n;
    interlace->src_fps_d = fps_d * pdformat->ratio_d;
  } else {
    gst_caps_replace (&interlace->srccaps, caps);
    interlace->src_fps_n = fps_n;
    interlace->src_fps_d = fps_d;
  }

  /* BUGFIX: gst_pad_set_caps() and gst_caps_replace() both take their own
   * references; our gst_caps_copy() reference was previously never released,
   * leaking the caps on every renegotiation. */
  gst_caps_unref (othercaps);

error:
  g_object_unref (interlace);

  return ret;
}
/* Parses a set of caps and tags in st and populates a GstDiscovererStreamInfo
 * structure (parent, if !NULL, otherwise it allocates one).
 *
 * Dispatches on the caps media type: "audio/..." fills a
 * GstDiscovererAudioInfo, "video/..." / "image/..." a GstDiscovererVideoInfo,
 * and anything else a plain GstDiscovererStreamInfo with just caps/tags.
 * Ownership: the caps reference returned by gst_structure_id_get() is
 * transferred to the newly-allocated info; when an existing parent is reused
 * the reference is released before returning. */
static GstDiscovererStreamInfo *
collect_information (GstDiscoverer * dc, const GstStructure * st,
    GstDiscovererStreamInfo * parent)
{
  GstCaps *caps;
  GstStructure *caps_st, *tags_st;
  const gchar *name;
  int tmp, tmp2;
  guint utmp;
  gboolean btmp;

  if (!st || !gst_structure_id_has_field (st, _CAPS_QUARK)) {
    GST_WARNING ("Couldn't find caps !");
    if (parent)
      return parent;
    else
      return (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
  }

  /* Returns a new reference on the caps; st keeps its own. */
  gst_structure_id_get (st, _CAPS_QUARK, GST_TYPE_CAPS, &caps, NULL);
  caps_st = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (caps_st);

  if (g_str_has_prefix (name, "audio/")) {
    GstDiscovererAudioInfo *info;

    if (parent)
      info = (GstDiscovererAudioInfo *) parent;
    else {
      info = (GstDiscovererAudioInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_AUDIO_INFO);
      info->parent.caps = caps;
    }

    if (gst_structure_get_int (caps_st, "rate", &tmp))
      info->sample_rate = (guint) tmp;

    if (gst_structure_get_int (caps_st, "channels", &tmp))
      info->channels = (guint) tmp;

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK, GST_TYPE_STRUCTURE, &tags_st,
          NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    /* BUGFIX: when reusing an existing parent the caps reference from
     * gst_structure_id_get() was never stored anywhere and leaked. */
    if (parent)
      gst_caps_unref (caps);

    return (GstDiscovererStreamInfo *) info;

  } else if (g_str_has_prefix (name, "video/") ||
      g_str_has_prefix (name, "image/")) {
    GstDiscovererVideoInfo *info;
    GstVideoFormat format;

    if (parent)
      info = (GstDiscovererVideoInfo *) parent;
    else {
      info = (GstDiscovererVideoInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_VIDEO_INFO);
      info->parent.caps = caps;
    }

    if (gst_video_format_parse_caps (caps, &format, &tmp, &tmp2)) {
      info->width = (guint) tmp;
      info->height = (guint) tmp2;
    }

    if (gst_structure_get_int (caps_st, "depth", &tmp))
      info->depth = (guint) tmp;

    if (gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp, &tmp2)) {
      info->par_num = tmp;
      info->par_denom = tmp2;
    }

    if (gst_video_parse_caps_framerate (caps, &tmp, &tmp2)) {
      info->framerate_num = tmp;
      info->framerate_denom = tmp2;
    }

    if (gst_video_format_parse_caps_interlaced (caps, &btmp))
      info->interlaced = btmp;

    if (gst_structure_id_has_field (st, _TAGS_QUARK)) {
      gst_structure_id_get (st, _TAGS_QUARK, GST_TYPE_STRUCTURE, &tags_st,
          NULL);
      if (gst_structure_get_uint (tags_st, GST_TAG_BITRATE, &utmp) ||
          gst_structure_get_uint (tags_st, GST_TAG_NOMINAL_BITRATE, &utmp))
        info->bitrate = utmp;

      if (gst_structure_get_uint (tags_st, GST_TAG_MAXIMUM_BITRATE, &utmp))
        info->max_bitrate = utmp;

      /* FIXME: Is it worth it to remove the tags we've parsed? */
      info->parent.tags = gst_tag_list_merge (info->parent.tags,
          (GstTagList *) tags_st, GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    /* BUGFIX: see audio branch — release the unused caps reference. */
    if (parent)
      gst_caps_unref (caps);

    return (GstDiscovererStreamInfo *) info;

  } else {
    /* None of the above - populate what information we can */
    GstDiscovererStreamInfo *info;

    if (parent)
      info = parent;
    else {
      info = (GstDiscovererStreamInfo *)
          gst_mini_object_new (GST_TYPE_DISCOVERER_STREAM_INFO);
      info->caps = caps;
    }

    if (gst_structure_id_get (st, _TAGS_QUARK, GST_TYPE_STRUCTURE, &tags_st,
            NULL)) {
      info->tags = gst_tag_list_merge (info->tags, (GstTagList *) tags_st,
          GST_TAG_MERGE_REPLACE);
      gst_structure_free (tags_st);
    }

    /* BUGFIX: see audio branch — release the unused caps reference. */
    if (parent)
      gst_caps_unref (caps);

    return info;
  }
}