/* Configure the video sink pad from @caps: cache the video format, drop any
 * cached mask when the frame size changes, compute the per-frame duration,
 * and forward the caps to the source pad. */
static gboolean
gst_shape_wipe_video_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
  GstStructure *structure;
  GstVideoFormat format;
  gint new_width, new_height;
  gint fps_num, fps_denom;
  gboolean result = TRUE;

  GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_video_format_parse_caps (caps, &format, &new_width, &new_height) ||
      !gst_structure_get_fraction (structure, "framerate", &fps_num,
          &fps_denom)) {
    result = FALSE;
    goto done;
  }

  self->fmt = format;

  if (self->width != new_width || self->height != new_height) {
    /* Size changed: the cached mask no longer matches, drop it under lock */
    g_mutex_lock (self->mask_mutex);
    self->width = new_width;
    self->height = new_height;
    if (self->mask)
      gst_buffer_unref (self->mask);
    self->mask = NULL;
    g_mutex_unlock (self->mask_mutex);
  }

  /* Frame duration is the inverse of the framerate; 0 when unknown */
  self->frame_duration = (fps_num != 0)
      ? gst_util_uint64_scale (GST_SECOND, fps_denom, fps_num)
      : 0;

  result = gst_pad_set_caps (self->srcpad, caps);

done:
  gst_object_unref (self);
  return result;
}
/* Probe playbin's configured audio/video sinks to record whether the stream
 * has audio and/or video, and publish the video frame size into the file's
 * info attributes.
 *
 * Fixes: the sink references returned by g_object_get() and the pad
 * references returned by gst_element_get_static_pad() were never released. */
static void
update_stream_info (GthMediaViewerPage *self)
{
	GstElement *audio_sink = NULL;
	GstElement *video_sink = NULL;
	GstPad     *audio_pad;
	GstPad     *video_pad;

	g_object_get (self->priv->playbin,
		      "audio-sink", &audio_sink,
		      "video-sink", &video_sink,
		      NULL);

	if (audio_sink != NULL) {
		audio_pad = gst_element_get_static_pad (GST_ELEMENT (audio_sink), "sink");
		if (audio_pad != NULL) {
			self->priv->has_audio = TRUE;
			gst_object_unref (audio_pad);
		}
		gst_object_unref (audio_sink);
	}

	if (video_sink != NULL) {
		video_pad = gst_element_get_static_pad (GST_ELEMENT (video_sink), "sink");
		if (video_pad != NULL) {
			GstCaps *caps;

			self->priv->has_video = TRUE;

			if ((caps = gst_pad_get_current_caps (video_pad)) != NULL) {
				GstStructure *structure;
				int           video_width;
				int           video_height;

				structure = gst_caps_get_structure (caps, 0);
				gst_structure_get_fraction (structure, "framerate",
							    &self->priv->video_fps_n,
							    &self->priv->video_fps_d);
				gst_structure_get_int (structure, "width", &video_width);
				gst_structure_get_int (structure, "height", &video_height);
				g_file_info_set_attribute_int32 (self->priv->file_data->info, "frame::width", video_width);
				g_file_info_set_attribute_int32 (self->priv->file_data->info, "frame::height", video_height);
				gst_caps_unref (caps);
			}
			gst_object_unref (video_pad);
		}
		gst_object_unref (video_sink);
	}
}
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, "Video preroll");
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
  int PARNumerator, PARDenominator;
#if GST_VERSION_MAJOR >= 1
  /* GStreamer 1.x: read format/size/PAR from the negotiated caps via
   * GstVideoInfo. */
  GstCaps* caps = gst_pad_get_current_caps(sinkpad);
  /* NOTE(review): caps may be NULL if the pad has no current caps —
   * gst_video_info_from_caps would then be called with NULL; confirm the
   * preroll callback guarantees negotiated caps here. */
  memset (&mVideoInfo, 0, sizeof (mVideoInfo));
  gst_video_info_from_caps(&mVideoInfo, caps);
  mFormat = mVideoInfo.finfo->format;
  mPicture.width = mVideoInfo.width;
  mPicture.height = mVideoInfo.height;
  PARNumerator = GST_VIDEO_INFO_PAR_N(&mVideoInfo);
  PARDenominator = GST_VIDEO_INFO_PAR_D(&mVideoInfo);
#else
  /* GStreamer 0.10: parse the caps directly; fall back to square pixels
   * when no pixel-aspect-ratio is present. */
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
  if (!gst_video_parse_caps_pixel_aspect_ratio(caps, &PARNumerator, &PARDenominator)) {
    PARNumerator = 1;
    PARDenominator = 1;
  }
#endif
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");

  // Calculate display size according to pixel aspect ratio.
  nsIntRect pictureRect(0, 0, mPicture.width, mPicture.height);
  nsIntSize frameSize = nsIntSize(mPicture.width, mPicture.height);
  nsIntSize displaySize = nsIntSize(mPicture.width, mPicture.height);
  ScaleDisplayByAspectRatio(displaySize, float(PARNumerator) / float(PARDenominator));

  // If video frame size is overflow, stop playing.
  if (IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    /* Cache the framerate fraction for later timestamp computation */
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
    mInfo.mVideo.mDisplay = ThebesIntSize(displaySize.ToIntSize());
    mInfo.mVideo.mHasVideo = true;
  } else {
    LOG(PR_LOG_DEBUG, "invalid video region");
    Eos();
  }
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
/* Transform caps between pads: in the sink direction, divide the framerate
 * by the configured factor (numerator/(denominator*factor)) and remember
 * the input rate.
 *
 * Fixes: the framerate fraction was used without checking the return of
 * gst_structure_get_fraction() (uninitialized reads when the field is
 * missing), and the structure copy `s2` was leaked whenever the direction
 * was not GST_PAD_SINK. */
static GstCaps *
gst_video_rate_divider_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVideoRateDivider *videorate = GST_VIDEO_RATE_DIVIDER (trans);
  GstCaps *ret;
  GstStructure *s, *s2;
  gint rate_numerator = 0, rate_denominator = 1;

  ret = gst_caps_copy (caps);

  /* Any caps simply return */
  if (gst_caps_is_any (caps)) {
    GST_DEBUG_OBJECT (trans,
        "transform caps: %" GST_PTR_FORMAT " (direction = %s) ANY", caps,
        get_direction_name (direction));
    return ret;
  }

  s = gst_caps_get_structure (ret, 0);

  /* Without a parseable framerate there is nothing to divide */
  if (!gst_structure_get_fraction (s, "framerate", &rate_numerator,
          &rate_denominator)) {
    GST_DEBUG_OBJECT (trans,
        "transform caps: %" GST_PTR_FORMAT " (direction = %s) no framerate",
        caps, get_direction_name (direction));
    return ret;
  }

  GST_DEBUG_OBJECT (trans,
      "transform caps: %" GST_PTR_FORMAT " (direction = %s framerate = %d/%d)",
      caps, get_direction_name (direction), rate_numerator, rate_denominator);

  if (direction == GST_PAD_SINK) {
    /* Copy the structure only when it is actually consumed by the merge */
    s2 = gst_structure_copy (s);

    /* correct input flow framerate */
    /* store input framerate */
    videorate->from_rate_numerator = rate_numerator;
    videorate->from_rate_denominator = rate_denominator;

    gst_caps_remove_structure (ret, 0);
    gst_structure_set (s2, "framerate", GST_TYPE_FRACTION,
        rate_numerator, rate_denominator * videorate->factor, NULL);
    gst_caps_merge_structure (ret, s2);
  }

  return ret;
}
static GUPnPDLNAFractionValue get_fraction_value_from_structure (const GstStructure *st, const gchar *name) { GUPnPDLNAFractionValue value = GUPNP_DLNA_FRACTION_VALUE_UNSET; if (st != NULL) { gint ndata; gint ddata; if (gst_structure_get_fraction (st, name, &ndata, &ddata)) { value.state = GUPNP_DLNA_VALUE_STATE_SET; value.numerator = ndata; value.denominator = ddata; } } return value; }
/* Re-read the negotiated video resolution and pixel-aspect-ratio from the
 * identity element's src pad and sync them into the session's tag map,
 * emitting tagsChanged() when either value differs from what is stored. */
void QGstreamerPlayerSession::updateVideoResolutionTag()
{
    QSize size;
    QSize aspectRatio;

    /* NOTE(review): assumes m_videoIdentity always exposes a "src" pad;
     * a NULL pad would be passed straight to gst_pad_get_negotiated_caps —
     * confirm the element is always present when this is called. */
    GstPad *pad = gst_element_get_static_pad(m_videoIdentity, "src");
    GstCaps *caps = gst_pad_get_negotiated_caps(pad);

    if (caps) {
        const GstStructure *structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width", &size.rwidth());
        gst_structure_get_int(structure, "height", &size.rheight());

        gint aspectNum = 0;
        gint aspectDenum = 0;
        /* Only trust the PAR when a valid size was parsed */
        if (!size.isEmpty() && gst_structure_get_fraction(
                structure,
                "pixel-aspect-ratio",
                &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                aspectRatio = QSize(aspectNum, aspectDenum);
        }
        gst_caps_unref(caps);
    }

    gst_object_unref(GST_OBJECT(pad));

    /* Compare against the previously published tags; size/aspectRatio stay
     * empty (invalid) when no caps were negotiated, which removes the tags. */
    QSize currentSize = m_tags.value("resolution").toSize();
    QSize currentAspectRatio = m_tags.value("pixel-aspect-ratio").toSize();

    if (currentSize != size || currentAspectRatio != aspectRatio) {
        if (aspectRatio.isEmpty())
            m_tags.remove("pixel-aspect-ratio");

        if (size.isEmpty()) {
            m_tags.remove("resolution");
        } else {
            m_tags.insert("resolution", QVariant(size));
            if (!aspectRatio.isEmpty())
                m_tags.insert("pixel-aspect-ratio", QVariant(aspectRatio));
        }

        emit tagsChanged();
    }
}
/*!
    Returns the aspect-ratio-corrected resolution of \a caps.
    If caps doesn't have a valid size, an empty QSize is returned.

    Fix: the width correction previously computed
    qRound(size.width()*aspectNum/aspectDenum) entirely in integer
    arithmetic, so the division truncated before qRound ever ran; the
    multiplication is now promoted to floating point so the result is
    properly rounded.
*/
QSize QGstUtils::capsCorrectedResolution(const GstCaps *caps)
{
    QSize size;

    if (caps) {
        const GstStructure *structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width", &size.rwidth());
        gst_structure_get_int(structure, "height", &size.rheight());

        gint aspectNum = 0;
        gint aspectDenum = 0;
        if (!size.isEmpty() && gst_structure_get_fraction(
                    structure,
                    "pixel-aspect-ratio",
                    &aspectNum, &aspectDenum)) {
            if (aspectDenum > 0)
                size.setWidth(qRound(size.width() * qreal(aspectNum) / aspectDenum));
        }
    }

    return size;
}
EXPORT_C #endif gboolean gst_video_parse_caps_pixel_aspect_ratio (GstCaps * caps, int *par_n, int *par_d) { GstStructure *structure; if (!gst_caps_is_fixed (caps)) return FALSE; structure = gst_caps_get_structure (caps, 0); if (!gst_structure_get_fraction (structure, "pixel-aspect-ratio", par_n, par_d)) { *par_n = 1; *par_d = 1; } return TRUE; }
/* Configure the source pad from @caps: cache width/height/framerate,
 * precompute samples-per-frame and frame duration, and (re)allocate the
 * GL texture and FBO at the new size. */
static gboolean
gst_visual_gl_src_setcaps (GstPad * pad, GstCaps * caps)
{
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstStructure *s = gst_caps_get_structure (caps, 0);
  gboolean parsed;

  GST_DEBUG_OBJECT (visual, "src pad got caps %" GST_PTR_FORMAT, caps);

  /* All three fields are required; short-circuit on the first failure */
  parsed = gst_structure_get_int (s, "width", &visual->width)
      && gst_structure_get_int (s, "height", &visual->height)
      && gst_structure_get_fraction (s, "framerate", &visual->fps_n,
          &visual->fps_d);

  if (!parsed) {
    GST_DEBUG_OBJECT (visual, "error parsing caps");
    gst_object_unref (visual);
    return FALSE;
  }

  /* precalc some values */
  visual->spf =
      gst_util_uint64_scale_int (visual->rate, visual->fps_d, visual->fps_n);
  visual->duration =
      gst_util_uint64_scale_int (GST_SECOND, visual->fps_d, visual->fps_n);

  gst_gl_display_gen_texture (visual->display, &visual->midtexture,
      visual->width, visual->height);
  gst_gl_display_gen_fbo (visual->display, visual->width, visual->height,
      &visual->fbo, &visual->depthbuffer);

  gst_object_unref (visual);
  return TRUE;
}
/* Accept new caps on the segment clip: require a parseable framerate with a
 * non-zero denominator and store it.
 *
 * Fix: the original discarded the return value of
 * gst_structure_get_fraction() and then tested fps_d, reading an
 * uninitialized variable whenever the framerate field was missing. */
static gboolean
gst_video_segment_clip_set_caps (GstSegmentClip * base, GstCaps * caps)
{
  GstVideoSegmentClip *self = GST_VIDEO_SEGMENT_CLIP (base);
  gboolean ret;
  GstStructure *s;
  gint fps_n, fps_d;

  s = gst_caps_get_structure (caps, 0);

  ret = gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)
      && fps_d != 0;

  if (ret) {
    GST_DEBUG_OBJECT (self, "Configured framerate %d/%d", fps_n, fps_d);
    self->fps_n = fps_n;
    self->fps_d = fps_d;
  }

  return ret;
}
/* notify::caps callback on the video pad: read width/height/framerate and
 * pixel-aspect-ratio from the negotiated caps into the player state, then
 * notify the engine via the geometry callback when one is registered.
 *
 * Fix: the early return on a failed width/height/framerate parse leaked the
 * caps reference returned by gst_pad_get_negotiated_caps(). */
static void
cb_caps_set (GObject *obj, GParamSpec *pspec, BansheePlayer *p)
{
    GstStructure * s = NULL;
    GstCaps * caps = gst_pad_get_negotiated_caps (GST_PAD (obj));

    if (G_UNLIKELY (!caps)) {
        return;
    }

    /* Get video decoder caps */
    s = gst_caps_get_structure (caps, 0);
    if (s) {
        const GValue *par;

        /* We need at least width/height and framerate */
        if (!(gst_structure_get_fraction (s, "framerate", &p->fps_n, &p->fps_d)
              && gst_structure_get_int (s, "width", &p->width)
              && gst_structure_get_int (s, "height", &p->height))) {
            goto out;
        }

        /* Get the PAR if available */
        par = gst_structure_get_value (s, "pixel-aspect-ratio");
        if (par) {
            p->par_n = gst_value_get_fraction_numerator (par);
            p->par_d = gst_value_get_fraction_denominator (par);
        } else {
            /* Square pixels */
            p->par_n = 1;
            p->par_d = 1;
        }

        /* Notify PlayerEngine if a callback was set */
        if (p->video_geometry_notify_cb != NULL) {
            p->video_geometry_notify_cb (p, p->width, p->height,
                p->fps_n, p->fps_d, p->par_n, p->par_d);
        }
    }

out:
    gst_caps_unref (caps);
}
/* Fixate @othercaps' framerate to the value nearest the framerate found in
 * @caps; when a pixel-aspect-ratio field is present, pin it near 1/1.
 * Returns @othercaps unchanged if @caps has no parseable framerate. */
static GstCaps *
gst_video_rate_fixate_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *st;
  gint fps_n, fps_d;
  const GValue *par_value;

  st = gst_caps_get_structure (caps, 0);
  if (G_UNLIKELY (!gst_structure_get_fraction (st, "framerate", &fps_n,
              &fps_d)))
    return othercaps;

  othercaps = gst_caps_truncate (othercaps);
  othercaps = gst_caps_make_writable (othercaps);

  st = gst_caps_get_structure (othercaps, 0);
  gst_structure_fixate_field_nearest_fraction (st, "framerate", fps_n, fps_d);

  par_value = gst_structure_get_value (st, "pixel-aspect-ratio");
  if (par_value != NULL)
    gst_structure_fixate_field_nearest_fraction (st, "pixel-aspect-ratio",
        1, 1);

  return othercaps;
}
/* For each raw video structure, adds a variant with framerate unset */
static gboolean
fix_video_caps_framerate (GstCapsFeatures *f, GstStructure *s,
    gpointer user_data)
{
  GstCaps *out = GST_CAPS (user_data);
  gint n, d;

  /* The original structure is always kept */
  gst_caps_append_structure_full (out, gst_structure_copy (s),
      f ? gst_caps_features_copy (f) : NULL);

  /* Only raw video with an explicit framerate gets the extra variant;
   * this lets the framerate be limited at the source already. */
  if (gst_structure_has_name (s, "video/x-raw")
      && gst_structure_get_fraction (s, "framerate", &n, &d)) {
    GstStructure *variant = gst_structure_copy (s);

    gst_structure_remove_field (variant, "framerate");
    gst_caps_append_structure_full (out, variant,
        f ? gst_caps_features_copy (f) : NULL);
  }

  return TRUE;
}
/* Mirror the framerate from the track's restriction caps onto @capsfilter
 * as simple video/x-raw caps. A no-op when the track has no restriction
 * caps or they carry no framerate.
 *
 * Fixes: the `restriction` caps returned by g_object_get() were leaked on
 * every path, and the newly built `caps` were leaked after g_object_set()
 * (which takes its own reference). */
static void
_sync_capsfilter_with_track (GESTrack * track, GstElement * capsfilter)
{
  GstCaps *restriction = NULL, *caps;
  gint fps_n, fps_d;
  GstStructure *structure;

  g_object_get (track, "restriction-caps", &restriction, NULL);

  if (!restriction || gst_caps_get_size (restriction) == 0)
    goto done;

  structure = gst_caps_get_structure (restriction, 0);
  if (!gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d))
    goto done;

  caps = gst_caps_new_simple ("video/x-raw",
      "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);

done:
  if (restriction)
    gst_caps_unref (restriction);
}
/* Accept new input caps: record negotiated width/height/framerate, rebuild
 * the VideoToolbox compression session for the new format, reset the
 * options dictionary, and mark the src pad for renegotiation.
 *
 * Fix: the failure path of gst_video_info_from_caps() returned while still
 * holding the object lock, deadlocking the next caller. */
static gboolean
gst_vtenc_sink_setcaps (GstVTEnc * self, GstCaps * caps)
{
  GstStructure *structure;
  VTCompressionSessionRef session;

  GST_OBJECT_LOCK (self);

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &self->negotiated_width);
  gst_structure_get_int (structure, "height", &self->negotiated_height);
  gst_structure_get_fraction (structure, "framerate",
      &self->negotiated_fps_n, &self->negotiated_fps_d);

  if (!gst_video_info_from_caps (&self->video_info, caps)) {
    GST_OBJECT_UNLOCK (self);
    return FALSE;
  }

  gst_vtenc_destroy_session (self, &self->session);

  /* Session creation must run unlocked */
  GST_OBJECT_UNLOCK (self);
  session = gst_vtenc_create_session (self);
  GST_OBJECT_LOCK (self);

  self->session = session;

  if (self->options != NULL)
    CFRelease (self->options);
  self->options = CFDictionaryCreateMutable (NULL, 0,
      &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

  /* renegotiate when upstream caps change */
  gst_pad_mark_reconfigure (self->srcpad);

  GST_OBJECT_UNLOCK (self);

  return TRUE;
}
static gboolean get_framerate(double *frameratep) { int framerate_numerator = 0; int framerate_denom = 0; GList *list = g_list_first(created_pads_list); while (list != NULL) { GstPad *pad = list->data; GstCaps *caps = gst_pad_get_current_caps(pad); if (GST_IS_CAPS(caps)) { const GstStructure *str; if (!gst_caps_is_fixed(caps)) continue; str = gst_caps_get_structure(caps, 0); gst_structure_get_fraction(str, "framerate", &framerate_numerator, &framerate_denom); if (framerate_denom != 0) break; } list = g_list_next(list); } if (framerate_denom == 0) return FALSE; *frameratep = (double)framerate_numerator / framerate_denom; return TRUE; }
void GStreamerReader::VideoPreroll()
{
  /* The first video buffer has reached the video sink. Get width and height */
  LOG(PR_LOG_DEBUG, ("Video preroll"));
  GstPad* sinkpad = gst_element_get_static_pad(GST_ELEMENT(mVideoAppSink), "sink");
#if GST_VERSION_MAJOR >= 1
  /* GStreamer 1.x: get format and dimensions via GstVideoInfo */
  GstCaps* caps = gst_pad_get_current_caps(sinkpad);
  /* NOTE(review): caps may be NULL if the pad has no current caps —
   * confirm preroll guarantees negotiation has completed here. */
  memset (&mVideoInfo, 0, sizeof (mVideoInfo));
  gst_video_info_from_caps(&mVideoInfo, caps);
  mFormat = mVideoInfo.finfo->format;
  mPicture.width = mVideoInfo.width;
  mPicture.height = mVideoInfo.height;
#else
  /* GStreamer 0.10: parse format and dimensions directly from the caps */
  GstCaps* caps = gst_pad_get_negotiated_caps(sinkpad);
  gst_video_format_parse_caps(caps, &mFormat, &mPicture.width, &mPicture.height);
#endif
  /* Cache the stream framerate for later timestamp calculations */
  GstStructure* structure = gst_caps_get_structure(caps, 0);
  gst_structure_get_fraction(structure, "framerate", &fpsNum, &fpsDen);
  NS_ASSERTION(mPicture.width && mPicture.height, "invalid video resolution");
  mInfo.mVideo.mDisplay = ThebesIntSize(mPicture.Size());
  mInfo.mVideo.mHasVideo = true;
  gst_caps_unref(caps);
  gst_object_unref(sinkpad);
}
/* Pull width/height/framerate out of the track's restriction caps into the
 * frame positioner (0 / fps_n=-1 when absent), then propagate the change
 * through gst_frame_positioner_update_properties().
 *
 * Fix: the caps reference returned by g_object_get() was never released. */
static void
sync_properties_from_track (GstFramePositioner * pos, GESTrack * track)
{
  gint width, height;
  gint old_track_width, old_track_height;
  GstCaps *caps;

  g_object_get (track, "restriction-caps", &caps, NULL);

  width = height = 0;

  if (caps && gst_caps_get_size (caps) > 0) {
    GstStructure *structure;

    structure = gst_caps_get_structure (caps, 0);
    if (!gst_structure_get_int (structure, "width", &width))
      width = 0;
    if (!gst_structure_get_int (structure, "height", &height))
      height = 0;
    if (!gst_structure_get_fraction (structure, "framerate", &(pos->fps_n),
            &(pos->fps_d)))
      pos->fps_n = -1;
  }

  if (caps)
    gst_caps_unref (caps);

  old_track_width = pos->track_width;
  old_track_height = pos->track_height;

  pos->track_width = width;
  pos->track_height = height;

  GST_DEBUG_OBJECT (pos, "syncing framerate from caps : %d/%d", pos->fps_n,
      pos->fps_d);

  gst_frame_positioner_update_properties (pos, ges_track_get_mixing (track),
      old_track_width, old_track_height);
}
/* Test helper: assert that the current caps on the global sinkpad carry
 * exactly the expected width, height and framerate. */
static void
_gst_vp8_test_check_output_caps (gint width, gint height, gint fps_n,
    gint fps_d)
{
  GstCaps *caps;
  GstStructure *s;
  gint got_w, got_h, got_fpsn, got_fpsd;

  caps = gst_pad_get_current_caps (sinkpad);
  fail_unless (caps != NULL);

  s = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_get_int (s, "width", &got_w));
  fail_unless (gst_structure_get_int (s, "height", &got_h));
  fail_unless (gst_structure_get_fraction (s, "framerate", &got_fpsn,
          &got_fpsd));

  fail_unless (got_w == width);
  fail_unless (got_h == height);
  fail_unless (got_fpsn == fps_n);
  fail_unless (got_fpsd == fps_d);

  gst_caps_unref (caps);
}
/* Configure the goom source pad from @caps: cache geometry and framerate,
 * size the goom renderer, precompute buffer sizing/timing, and propagate
 * the caps downstream. */
static gboolean
gst_goom_src_setcaps (GstGoom * goom, GstCaps * caps)
{
  GstStructure *st = gst_caps_get_structure (caps, 0);
  gboolean parsed;

  parsed = gst_structure_get_int (st, "width", &goom->width)
      && gst_structure_get_int (st, "height", &goom->height)
      && gst_structure_get_fraction (st, "framerate", &goom->fps_n,
          &goom->fps_d);

  if (!parsed) {
    GST_DEBUG_OBJECT (goom, "error parsing caps");
    return FALSE;
  }

  goom_set_resolution (goom->plugin, goom->width, goom->height);

  /* size of the output buffer in bytes, depth is always 4 bytes */
  goom->outsize = goom->width * goom->height * 4;
  goom->duration =
      gst_util_uint64_scale_int (GST_SECOND, goom->fps_d, goom->fps_n);
  goom->spf = gst_util_uint64_scale_int (goom->rate, goom->fps_d, goom->fps_n);
  goom->bpf = goom->spf * goom->bps;

  GST_DEBUG_OBJECT (goom, "dimension %dx%d, framerate %d/%d, spf %d",
      goom->width, goom->height, goom->fps_n, goom->fps_d, goom->spf);

  return gst_pad_set_caps (goom->srcpad, caps);
}
/* this function handles the link with other elements:
 * cache frame geometry and framerate, (re)allocate the working OpenCV
 * image at the new size, and forward the caps to the opposite pad.
 *
 * Fix: the return of gst_structure_get_fraction() was ignored, so a
 * missing framerate field left numerator/denominator uninitialized and
 * the division could be by zero / garbage. */
static gboolean
gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
{
  GstMotioncells *filter;
  GstPad *otherpad;
  GstStructure *structure;
  int numerator = 0, denominator = 1;

  filter = gst_motion_cells (gst_pad_get_parent (pad));
  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &filter->width);
  gst_structure_get_int (structure, "height", &filter->height);

  if (gst_structure_get_fraction (structure, "framerate", &numerator,
          &denominator) && denominator != 0)
    filter->framerate = (double) numerator / (double) denominator;

  if (filter->cvImage)
    cvReleaseImage (&filter->cvImage);
  filter->cvImage =
      cvCreateImage (cvSize (filter->width, filter->height), IPL_DEPTH_8U, 3);

  otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
  gst_object_unref (filter);

  return gst_pad_set_caps (otherpad, caps);
}
/* Scan the buffered input for a complete PNG image: resynchronize to the
 * PNG signature if needed, walk the chunk list to extract IHDR dimensions,
 * grow the minimum frame size as IDAT chunks are seen, and on IEND update
 * the src caps (copying the framerate from the sink caps when present) and
 * finish the frame. */
static GstFlowReturn gst_png_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstPngParse *pngparse = GST_PNG_PARSE (parse);
  GstMapInfo map;
  GstByteReader reader;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 signature;
  guint width = 0, height = 0;

  gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
    goto beach;

  if (signature != PNG_SIGNATURE) {
    /* Not at a signature: scan forward for the 0x89 'P' 'N' 'G' prefix */
    for (;;) {
      guint offset;

      offset = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          0x89504E47, 0, gst_byte_reader_get_remaining (&reader));

      /* NOTE(review): offset is guint compared against -1; relies on the
       * scan's "not found" return converting to G_MAXUINT — confirm. */
      if (offset == -1) {
        /* Keep the last 4 bytes: a signature could straddle the boundary */
        *skipsize = gst_byte_reader_get_remaining (&reader) - 4;
        goto beach;
      }

      gst_byte_reader_skip (&reader, offset);

      if (!gst_byte_reader_peek_uint64_be (&reader, &signature))
        goto beach;

      if (signature == PNG_SIGNATURE) {
        /* We're skipping, go out, we'll be back */
        *skipsize = gst_byte_reader_get_pos (&reader);
        goto beach;
      }
      /* False positive on the 32-bit prefix; step past it and rescan */
      gst_byte_reader_skip (&reader, 4);
    }
  }

  /* Skip the 8-byte PNG signature and walk the chunks */
  gst_byte_reader_skip (&reader, 8);

  for (;;) {
    guint32 length;
    guint32 code;

    if (!gst_byte_reader_get_uint32_be (&reader, &length))
      goto beach;
    if (!gst_byte_reader_get_uint32_le (&reader, &code))
      goto beach;

    GST_TRACE_OBJECT (parse, "%" GST_FOURCC_FORMAT " chunk, %u bytes",
        GST_FOURCC_ARGS (code), length);

    if (code == GST_MAKE_FOURCC ('I', 'H', 'D', 'R')) {
      /* First 8 bytes of IHDR are the image dimensions */
      if (!gst_byte_reader_get_uint32_be (&reader, &width))
        goto beach;
      if (!gst_byte_reader_get_uint32_be (&reader, &height))
        goto beach;
      length -= 8;
    } else if (code == GST_MAKE_FOURCC ('I', 'D', 'A', 'T')) {
      /* Make sure enough input is buffered to reach past this chunk */
      gst_base_parse_set_min_frame_size (parse,
          gst_byte_reader_get_pos (&reader) + 4 + length + 12);
    }

    /* Skip remaining chunk payload plus the 4-byte CRC */
    if (!gst_byte_reader_skip (&reader, length + 4))
      goto beach;

    if (code == GST_MAKE_FOURCC ('I', 'E', 'N', 'D')) {
      /* the start code and at least 2 empty frames (IHDR and IEND) */
      gst_base_parse_set_min_frame_size (parse, 8 + 12 + 12);

      if (pngparse->width != width || pngparse->height != height) {
        GstCaps *caps, *sink_caps;

        pngparse->height = height;
        pngparse->width = width;

        caps = gst_caps_new_simple ("image/png",
            "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);

        /* Carry the framerate over from upstream when it provides one */
        sink_caps =
            gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (pngparse));
        if (sink_caps) {
          GstStructure *st;
          gint fr_num, fr_denom;

          st = gst_caps_get_structure (sink_caps, 0);
          if (st && gst_structure_get_fraction (st, "framerate", &fr_num,
                  &fr_denom)) {
            gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
                fr_num, fr_denom, NULL);
          } else {
            GST_WARNING_OBJECT (pngparse, "No framerate set");
          }
          gst_caps_unref (sink_caps);
        }

        if (!gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps))
          ret = GST_FLOW_NOT_NEGOTIATED;

        gst_caps_unref (caps);

        if (ret != GST_FLOW_OK)
          goto beach;
      }

      gst_buffer_unmap (frame->buffer, &map);
      return gst_base_parse_finish_frame (parse, frame,
          gst_byte_reader_get_pos (&reader));
    }
  }

beach:
  gst_buffer_unmap (frame->buffer, &map);
  return ret;
}
/* Accept sink caps: record upstream width/height/framerate, and when a
 * codec_data blob (avcC) is present, parse it — version, profile, NAL
 * length size, SPS and PPS sets — digesting each parameter set, then set
 * up packetized (AVC) vs. bytestream handling.
 *
 * Fix: after consuming the PPS count byte the remaining-size counter was
 * *incremented* (`size++`) instead of decremented, overstating the bytes
 * left and defeating the `size < len + 2` bounds check in the PPS loop. */
static gboolean
gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
{
  GstH264Parse *h264parse;
  GstStructure *str;
  const GValue *value;
  GstBuffer *buffer = NULL;
  guint size;

  h264parse = GST_H264_PARSE (parse);

  /* reset */
  h264parse->push_codec = FALSE;

  str = gst_caps_get_structure (caps, 0);

  /* accept upstream info if provided */
  gst_structure_get_int (str, "width", &h264parse->width);
  gst_structure_get_int (str, "height", &h264parse->height);
  gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
      &h264parse->fps_den);

  /* packetized video has a codec_data */
  if ((value = gst_structure_get_value (str, "codec_data"))) {
    guint8 *data;
    guint num_sps, num_pps, profile, len;
    gint i;

    GST_DEBUG_OBJECT (h264parse, "have packetized h264");
    /* make note for optional split processing */
    h264parse->packetized = TRUE;

    buffer = gst_value_get_buffer (value);
    if (!buffer)
      goto wrong_type;
    data = GST_BUFFER_DATA (buffer);
    size = GST_BUFFER_SIZE (buffer);

    /* parse the avcC data */
    if (size < 7)
      goto avcc_too_small;
    /* parse the version, this must be 1 */
    if (data[0] != 1)
      goto wrong_version;

    /* AVCProfileIndication */
    /* profile_compat */
    /* AVCLevelIndication */
    profile = (data[1] << 16) | (data[2] << 8) | data[3];
    GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);

    /* 6 bits reserved | 2 bits lengthSizeMinusOne */
    /* this is the number of bytes in front of the NAL units to mark their
     * length */
    h264parse->nal_length_size = (data[4] & 0x03) + 1;
    GST_DEBUG_OBJECT (h264parse, "nal length %u", h264parse->nal_length_size);

    num_sps = data[5] & 0x1f;
    data += 6;
    size -= 6;
    for (i = 0; i < num_sps; i++) {
      len = GST_READ_UINT16_BE (data);
      if (size < len + 2 || len < 2)
        goto avcc_too_small;
      /* digest for later reference */
      gst_h264_parse_process_nal (h264parse, data, 0, 2, len);
      data += len + 2;
      size -= len + 2;
    }
    num_pps = data[0];
    data++;
    size--;                     /* was `size++` — see function comment */
    for (i = 0; i < num_pps; i++) {
      len = GST_READ_UINT16_BE (data);
      if (size < len + 2 || len < 2)
        goto avcc_too_small;
      /* digest for later reference */
      gst_h264_parse_process_nal (h264parse, data, 0, 2, len);
      data += len + 2;
      size -= len + 2;
    }
  } else {
    GST_DEBUG_OBJECT (h264parse, "have bytestream h264");
    /* nothing to pre-process */
    h264parse->packetized = FALSE;
    /* we have 4 sync bytes */
    h264parse->nal_length_size = 4;
  }

  if (h264parse->packetized) {
    if (h264parse->split_packetized) {
      GST_DEBUG_OBJECT (h264parse,
          "converting AVC to nal bytestream prior to parsing");
      /* negotiate behaviour with upstream */
      gst_h264_parse_negotiate (h264parse);
      if (h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
        /* arrange to insert codec-data in-stream if needed */
        h264parse->push_codec = h264parse->packetized;
      }
    } else {
      GST_DEBUG_OBJECT (h264parse, "passing on packetized AVC");
      /* no choice to negotiate */
      h264parse->format = GST_H264_PARSE_FORMAT_AVC;
      h264parse->align = GST_H264_PARSE_ALIGN_AU;
      /* fallback codec-data */
      h264parse->codec_data = gst_buffer_ref (buffer);
      /* pass through unharmed, though _chain will parse a bit */
      gst_base_parse_set_format (parse, GST_BASE_PARSE_FORMAT_PASSTHROUGH,
          TRUE);
      /* we did parse codec-data and might supplement src caps */
      gst_h264_parse_update_src_caps (h264parse);
    }
  }

  /* src caps are only arranged for later on */
  return TRUE;

  /* ERRORS */
avcc_too_small:
  {
    GST_DEBUG_OBJECT (h264parse, "avcC size %u < 7", size);
    goto refuse_caps;
  }
wrong_version:
  {
    GST_DEBUG_OBJECT (h264parse, "wrong avcC version");
    goto refuse_caps;
  }
wrong_type:
  {
    GST_DEBUG_OBJECT (h264parse, "wrong codec-data type");
    goto refuse_caps;
  }
refuse_caps:
  {
    GST_WARNING_OBJECT (h264parse, "refused caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
}
/*!
 * \brief OpenIMAJCapGStreamer::getProperty retrieve the requested property from the pipeline
 * \param propId requested property
 * \return property value
 *
 * There are two ways the properties can be retrieved. For seek-based properties we can query the pipeline.
 * For frame-based properties, we use the caps of the last received sample. This means that some properties
 * are not available until a first frame was received
 */
double OpenIMAJCapGStreamer::getProperty( int propId )
{
    GstFormat format;
    gint64 value;
    gboolean status;

    /* FORMAT hides the GStreamer-version-specific query argument */
#define FORMAT format

    if(!pipeline) {
        WARN("GStreamer: no pipeline");
        /* NOTE(review): `false` converts to 0.0 here, which is
         * indistinguishable from a real zero-valued property */
        return false;
    }

    switch(propId) {
        case CAP_PROP_POS_MSEC:
            /* Seek-based property: query the sink's position in time */
            format = GST_FORMAT_TIME;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return value * 1e-6; // nano seconds to milli seconds
        case CAP_PROP_POS_FRAMES:
            /* Seek-based property: position in default units (frames) */
            format = GST_FORMAT_DEFAULT;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return value;
        case CAP_PROP_POS_AVI_RATIO:
            /* Seek-based property: fractional position in [0, 1] */
            format = GST_FORMAT_PERCENT;
            status = gst_element_query_position(sink, FORMAT, &value);
            if(!status) {
                WARN("GStreamer: unable to query position of stream");
                return false;
            }
            return ((double) value) / GST_FORMAT_PERCENT_MAX;
        case CAP_PROP_FRAME_WIDTH: {
            /* Frame-based property: needs the caps of a received sample */
            if (!buffer_caps){
                WARN("GStreamer: unable to query width of frame; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint width = 0;
            if(!gst_structure_get_int(structure, "width", &width)){
                WARN("GStreamer: unable to query width of frame");
                return 0;
            }
            return width;
            break;
        }
        case CAP_PROP_FRAME_HEIGHT: {
            /* Frame-based property: needs the caps of a received sample */
            if (!buffer_caps){
                WARN("GStreamer: unable to query height of frame; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint height = 0;
            if(!gst_structure_get_int(structure, "height", &height)){
                WARN("GStreamer: unable to query height of frame");
                return 0;
            }
            return height;
            break;
        }
        case CAP_PROP_FPS: {
            /* Frame-based property: framerate fraction reduced to a double */
            if (!buffer_caps){
                WARN("GStreamer: unable to query framerate of stream; no frame grabbed yet");
                return 0;
            }
            GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
            gint num = 0, denom=1;
            if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)){
                WARN("GStreamer: unable to query framerate of stream");
                return 0;
            }
            return (double)num/(double)denom;
            break;
        }
        case CAP_GSTREAMER_QUEUE_LENGTH:
            if(!sink) {
                WARN("GStreamer: there is no sink yet");
                return false;
            }
            return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
        default:
            WARN("GStreamer: unhandled property");
            break;
    }

#undef FORMAT

    return false;
}
/* Configure the libav decoder context from the sink caps: copy geometry,
 * PAR and framerate, map the caps media type (plus version/fourcc fields)
 * to a CODEC_ID, pick an optional NAL/startcode parse function, and attach
 * any codec_data blob as extradata. Returns false when no decoder exists
 * for the mapped codec id. */
static gboolean
sink_setcaps(GstPad *pad, GstCaps *caps)
{
	struct obj *self;
	GstStructure *in_struc;
	const char *name;
	int codec_id;
	const GValue *codec_data;
	GstBuffer *buf;
	AVCodecContext *ctx;

	self = (struct obj *)((GstObject *)pad)->parent;
	ctx = self->av_ctx;

	in_struc = gst_caps_get_structure(caps, 0);

	gst_structure_get_int(in_struc, "width", &ctx->width);
	gst_structure_get_int(in_struc, "height", &ctx->height);

	gst_structure_get_fraction(in_struc, "pixel-aspect-ratio",
			&ctx->sample_aspect_ratio.num, &ctx->sample_aspect_ratio.den);

	/* time_base is the inverse of the framerate, hence num/den swapped */
	gst_structure_get_fraction(in_struc, "framerate",
			&ctx->time_base.den, &ctx->time_base.num);

	/* bug in xvimagesink? */
	if (!ctx->time_base.num)
		ctx->time_base = (AVRational){ 1, 0 };

	/* Map the caps media type to a libav codec id */
	name = gst_structure_get_name(in_struc);
	if (strcmp(name, "video/x-h263") == 0)
		codec_id = CODEC_ID_H263;
	else if (strcmp(name, "video/x-h264") == 0)
		codec_id = CODEC_ID_H264;
	else if (strcmp(name, "video/mpeg") == 0) {
		int version;
		gst_structure_get_int(in_struc, "mpegversion", &version);
		switch (version) {
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 2:
			codec_id = CODEC_ID_MPEG2VIDEO;
			break;
		case 1:
			codec_id = CODEC_ID_MPEG1VIDEO;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-divx") == 0) {
		int version;
		gst_structure_get_int(in_struc, "divxversion", &version);
		switch (version) {
		case 5:
		case 4:
			codec_id = CODEC_ID_MPEG4;
			break;
		case 3:
			codec_id = CODEC_ID_MSMPEG4V3;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else if (strcmp(name, "video/x-xvid") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-3ivx") == 0)
		codec_id = CODEC_ID_MPEG4;
	else if (strcmp(name, "video/x-vp8") == 0)
		codec_id = CODEC_ID_VP8;
	else if (strcmp(name, "video/x-wmv") == 0) {
		int version;
		gst_structure_get_int(in_struc, "wmvversion", &version);
		switch (version) {
		case 3: {
			guint32 fourcc;
			codec_id = CODEC_ID_WMV3;
			/* WVC1 fourcc (in either field) means VC-1 advanced profile */
			if (gst_structure_get_fourcc(in_struc, "fourcc", &fourcc) ||
					gst_structure_get_fourcc(in_struc, "format", &fourcc))
			{
				if (fourcc == GST_MAKE_FOURCC('W', 'V', 'C', '1'))
					codec_id = CODEC_ID_VC1;
			}
			break;
		}
		case 2:
			codec_id = CODEC_ID_WMV2;
			break;
		case 1:
			codec_id = CODEC_ID_WMV1;
			break;
		default:
			codec_id = CODEC_ID_NONE;
			break;
		}
	}
	else
		codec_id = CODEC_ID_NONE;

	self->codec = avcodec_find_decoder(codec_id);
	if (!self->codec)
		return false;

	switch (codec_id) {
	case CODEC_ID_H263:
		self->parse_func = gst_av_h263_parse;
		break;
	case CODEC_ID_H264:
		self->parse_func = gst_av_h264_parse;
		break;
	case CODEC_ID_MPEG4:
		self->parse_func = gst_av_mpeg4_parse;
		break;
	}

	/* Copy codec_data (if any) into the context's extradata */
	codec_data = gst_structure_get_value(in_struc, "codec_data");
	if (!codec_data)
		goto next;
	buf = gst_value_get_buffer(codec_data);
	if (!buf)
		goto next;
	/* NOTE(review): malloc result is unchecked, and a previously set
	 * extradata pointer would be overwritten (leak) if caps are set
	 * twice — confirm callers only negotiate once. */
	ctx->extradata = malloc(buf->size + FF_INPUT_BUFFER_PADDING_SIZE);
	memcpy(ctx->extradata, buf->data, buf->size);
	ctx->extradata_size = buf->size;

	if (self->parse_func)
		self->parse_func(self, buf);
next:
	return true;
}
/* Sink pad event handler: on CAPS, derive the per-buffer duration from the
 * input framerate and publish rewritten "video/mpeg, unpacked=true" caps
 * downstream; on FLUSH_STOP, drop all queued B/IP frames; everything else
 * is forwarded.
 *
 * Fixes: gst_pad_get_parent() took a reference that was never released —
 * the @parent argument is used directly instead; and the strings returned
 * by gst_caps_to_string() were leaked in the debug logs — replaced with
 * GST_PTR_FORMAT, which needs no allocation. */
static gboolean
gst_mpeg4p2unpack_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
    GstMpeg4P2Unpack *self = GST_MPEG4P2UNPACK (parent);
    gboolean ret = FALSE;

    GST_LOG_OBJECT (self, "%s event", GST_EVENT_TYPE_NAME (event));

    switch (GST_EVENT_TYPE (event)) {
        case GST_EVENT_CAPS:
        {
            GstCaps *caps, *srccaps;
            GstStructure *structure;
            gint numerator, denominator;

            gst_event_parse_caps (event, &caps);
            GST_DEBUG_OBJECT (self, "sinkcaps = %" GST_PTR_FORMAT, caps);

            structure = gst_structure_copy (gst_caps_get_structure (caps, 0));
            if (gst_structure_get_fraction (structure, "framerate",
                    &numerator, &denominator)) {
                self->buffer_duration =
                    1000 / ((double) numerator * 1000 / denominator) *
                    GST_SECOND;
            }

            srccaps = gst_caps_new_empty ();
            gst_structure_set_name (structure, "video/mpeg");
            srccaps = gst_caps_merge_structure (srccaps, structure);
            gst_caps_set_simple (srccaps, "mpegversion", G_TYPE_INT, 4, NULL);
            gst_caps_set_simple (srccaps, "systemstream", G_TYPE_BOOLEAN,
                FALSE, NULL);
            gst_caps_set_simple (srccaps, "unpacked", G_TYPE_BOOLEAN, TRUE,
                NULL);
            GST_DEBUG_OBJECT (self, "srccaps = %" GST_PTR_FORMAT, srccaps);

            ret = gst_pad_set_caps (self->srcpad, srccaps);
            gst_caps_unref (srccaps);
            gst_event_unref (event);
            break;
        }
        case GST_EVENT_FLUSH_STOP:
        {
            guint i;

            /* Drop every queued frame so playback restarts clean */
            for (i = 0; i < MPEG4P2_MAX_B_FRAMES_COUNT; i++) {
                if (self->b_frames[i]) {
                    gst_buffer_unref (self->b_frames[i]);
                    self->b_frames[i] = NULL;
                }
            }
            if (self->second_ip_frame) {
                gst_buffer_unref (self->second_ip_frame);
                self->second_ip_frame = NULL;
            }
            if (self->b_frame) {
                gst_buffer_unref (self->b_frame);
                self->b_frame = NULL;
            }
            self->b_frames_count = 0;
            self->first_ip_frame_written = FALSE;
            ret = gst_pad_push_event (self->srcpad, event);
            break;
        }
        default:
            ret = gst_pad_push_event (self->srcpad, event);
            break;
    }

    return ret;
}
/* Fixate @othercaps (the not-yet-fixed peer caps) based on @caps (the
 * fixed caps of the opposite pad) for the videoscale element.
 *
 * Strategy: preserve the input display-aspect-ratio (DAR = size * PAR)
 * whenever possible, preferring dimensions close to the input ones.
 * The branches below cover every combination of which output fields
 * (width, height, pixel-aspect-ratio) are already fixed.
 *
 * Returns the (possibly new, writable) fixated @othercaps.
 * NOTE(review): control flow and statement order are load-bearing here
 * (tmp structures, goto done cleanup) — do not reorder. */
static GstCaps *
gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
    GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *ins, *outs;
  const GValue *from_par, *to_par;
  GValue fpar = { 0, }, tpar = { 0, };

  /* truncate to the first structure and make writable before editing */
  othercaps = gst_caps_truncate (othercaps);
  othercaps = gst_caps_make_writable (othercaps);

  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  ins = gst_caps_get_structure (caps, 0);
  outs = gst_caps_get_structure (othercaps, 0);

  from_par = gst_structure_get_value (ins, "pixel-aspect-ratio");
  to_par = gst_structure_get_value (outs, "pixel-aspect-ratio");

  /* If we're fixating from the sinkpad we always set the PAR and
   * assume that missing PAR on the sinkpad means 1/1 and
   * missing PAR on the srcpad means undefined
   */
  if (direction == GST_PAD_SINK) {
    if (!from_par) {
      g_value_init (&fpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&fpar, 1, 1);
      from_par = &fpar;
    }
    if (!to_par) {
      /* undefined output PAR: allow the full fraction range */
      g_value_init (&tpar, GST_TYPE_FRACTION_RANGE);
      gst_value_set_fraction_range_full (&tpar, 1, G_MAXINT, G_MAXINT, 1);
      to_par = &tpar;
    }
  } else {
    if (!to_par) {
      g_value_init (&tpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&tpar, 1, 1);
      to_par = &tpar;

      gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
          NULL);
    }
    if (!from_par) {
      g_value_init (&fpar, GST_TYPE_FRACTION);
      gst_value_set_fraction (&fpar, 1, 1);
      from_par = &fpar;
    }
  }

  /* we have both PAR but they might not be fixated */
  {
    gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
    gint w = 0, h = 0;
    gint from_dar_n, from_dar_d;
    gint num, den;

    /* from_par should be fixed */
    g_return_val_if_fail (gst_value_is_fixed (from_par), othercaps);

    from_par_n = gst_value_get_fraction_numerator (from_par);
    from_par_d = gst_value_get_fraction_denominator (from_par);

    gst_structure_get_int (ins, "width", &from_w);
    gst_structure_get_int (ins, "height", &from_h);

    gst_structure_get_int (outs, "width", &w);
    gst_structure_get_int (outs, "height", &h);

    /* if both width and height are already fixed, we can't do anything
     * about it anymore */
    if (w && h) {
      guint n, d;

      GST_DEBUG_OBJECT (base, "dimensions already set to %dx%d, not fixating",
          w, h);
      if (!gst_value_is_fixed (to_par)) {
        /* only the PAR is left to fixate: derive it from the input DAR */
        if (gst_video_calculate_display_ratio (&n, &d, from_w, from_h,
                from_par_n, from_par_d, w, h)) {
          GST_DEBUG_OBJECT (base, "fixating to_par to %dx%d", n, d);
          if (gst_structure_has_field (outs, "pixel-aspect-ratio"))
            gst_structure_fixate_field_nearest_fraction (outs,
                "pixel-aspect-ratio", n, d);
          else if (n != d)
            gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
                n, d, NULL);
        }
      }
      goto done;
    }

    /* Calculate input DAR */
    if (!gst_util_fraction_multiply (from_w, from_h, from_par_n, from_par_d,
            &from_dar_n, &from_dar_d)) {
      GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
          ("Error calculating the output scaled size - integer overflow"));
      goto done;
    }

    GST_DEBUG_OBJECT (base, "Input DAR is %d/%d", from_dar_n, from_dar_d);

    /* If either width or height are fixed there's not much we
     * can do either except choosing a height or width and PAR
     * that matches the DAR as good as possible */
    if (h) {
      GstStructure *tmp;
      gint set_w, set_par_n, set_par_d;

      GST_DEBUG_OBJECT (base, "height is fixed (%d)", h);

      /* If the PAR is fixed too, there's not much to do
       * except choosing the width that is nearest to the
       * width with the same DAR */
      if (gst_value_is_fixed (to_par)) {
        to_par_n = gst_value_get_fraction_numerator (to_par);
        to_par_d = gst_value_get_fraction_denominator (to_par);

        GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);

        if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
                to_par_n, &num, &den)) {
          GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
              ("Error calculating the output scaled size - integer overflow"));
          goto done;
        }

        w = (guint) gst_util_uint64_scale_int (h, num, den);
        gst_structure_fixate_field_nearest_int (outs, "width", w);
        goto done;
      }

      /* The PAR is not fixed and it's quite likely that we can set
       * an arbitrary PAR. */

      /* Check if we can keep the input width */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      /* Might have failed but try to keep the DAR nonetheless by
       * adjusting the PAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, h, set_w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        gst_structure_free (tmp);
        goto done;
      }

      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      /* Check if the adjusted PAR is accepted */
      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "width", G_TYPE_INT, set_w,
              "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
              NULL);
        goto done;
      }

      /* Otherwise scale the width to the new PAR and check if the
       * adjusted width is accepted. If all that fails we can't keep
       * the DAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      w = (guint) gst_util_uint64_scale_int (h, num, den);
      gst_structure_fixate_field_nearest_int (outs, "width", w);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);

      goto done;
    } else if (w) {
      /* mirror of the h-fixed branch above, with width fixed instead */
      GstStructure *tmp;
      gint set_h, set_par_n, set_par_d;

      GST_DEBUG_OBJECT (base, "width is fixed (%d)", w);

      /* If the PAR is fixed too, there's not much to do
       * except choosing the height that is nearest to the
       * height with the same DAR */
      if (gst_value_is_fixed (to_par)) {
        to_par_n = gst_value_get_fraction_numerator (to_par);
        to_par_d = gst_value_get_fraction_denominator (to_par);

        GST_DEBUG_OBJECT (base, "PAR is fixed %d/%d", to_par_n, to_par_d);

        if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_d,
                to_par_n, &num, &den)) {
          GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
              ("Error calculating the output scaled size - integer overflow"));
          goto done;
        }

        h = (guint) gst_util_uint64_scale_int (w, den, num);
        gst_structure_fixate_field_nearest_int (outs, "height", h);
        goto done;
      }

      /* The PAR is not fixed and it's quite likely that we can set
       * an arbitrary PAR. */

      /* Check if we can keep the input height */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);

      /* Might have failed but try to keep the DAR nonetheless by
       * adjusting the PAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        gst_structure_free (tmp);
        goto done;
      }

      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      /* Check if the adjusted PAR is accepted */
      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "height", G_TYPE_INT, set_h,
              "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d,
              NULL);
        goto done;
      }

      /* Otherwise scale the height to the new PAR and check if the
       * adjusted height is accepted. If all that fails we can't keep
       * the DAR */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      h = (guint) gst_util_uint64_scale_int (w, den, num);
      gst_structure_fixate_field_nearest_int (outs, "height", h);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);

      goto done;
    } else if (gst_value_is_fixed (to_par)) {
      /* neither dimension fixed, but the output PAR is */
      GstStructure *tmp;
      gint set_h, set_w, f_h, f_w;

      to_par_n = gst_value_get_fraction_numerator (to_par);
      to_par_d = gst_value_get_fraction_denominator (to_par);

      /* Calculate scale factor for the PAR change */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, to_par_n,
              to_par_d, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      /* Try to keep the input height (because of interlacing) */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);

      /* This might have failed but try to scale the width
       * to keep the DAR nonetheless */
      w = (guint) gst_util_uint64_scale_int (set_h, num, den);
      gst_structure_fixate_field_nearest_int (tmp, "width", w);
      gst_structure_get_int (tmp, "width", &set_w);
      gst_structure_free (tmp);

      /* We kept the DAR and the height is nearest to the original height */
      if (set_w == w) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);
        goto done;
      }

      f_h = set_h;
      f_w = set_w;

      /* If the former failed, try to keep the input width at least */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      /* This might have failed but try to scale the width
       * to keep the DAR nonetheless */
      h = (guint) gst_util_uint64_scale_int (set_w, den, num);
      gst_structure_fixate_field_nearest_int (tmp, "height", h);
      gst_structure_get_int (tmp, "height", &set_h);
      gst_structure_free (tmp);

      /* We kept the DAR and the width is nearest to the original width */
      if (set_h == h) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);
        goto done;
      }

      /* If all this failed, keep the height that was nearest to the original
       * height and the nearest possible width. This changes the DAR but
       * there's not much else to do here. */
      gst_structure_set (outs, "width", G_TYPE_INT, f_w, "height", G_TYPE_INT,
          f_h, NULL);
      goto done;
    } else {
      GstStructure *tmp;
      gint set_h, set_w, set_par_n, set_par_d, tmp2;

      /* width, height and PAR are not fixed but passthrough is not possible */

      /* First try to keep the height and width as good as possible
       * and scale PAR */
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", from_h);
      gst_structure_get_int (tmp, "height", &set_h);
      gst_structure_fixate_field_nearest_int (tmp, "width", from_w);
      gst_structure_get_int (tmp, "width", &set_w);

      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_h, set_w,
              &to_par_n, &to_par_d)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        gst_structure_free (tmp);
        goto done;
      }

      if (!gst_structure_has_field (tmp, "pixel-aspect-ratio"))
        gst_structure_set_value (tmp, "pixel-aspect-ratio", to_par);
      gst_structure_fixate_field_nearest_fraction (tmp, "pixel-aspect-ratio",
          to_par_n, to_par_d);
      gst_structure_get_fraction (tmp, "pixel-aspect-ratio", &set_par_n,
          &set_par_d);
      gst_structure_free (tmp);

      if (set_par_n == to_par_n && set_par_d == to_par_d) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, set_h, NULL);

        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* Otherwise try to scale width to keep the DAR with the set
       * PAR and height */
      if (!gst_util_fraction_multiply (from_dar_n, from_dar_d, set_par_d,
              set_par_n, &num, &den)) {
        GST_ELEMENT_ERROR (base, CORE, NEGOTIATION, (NULL),
            ("Error calculating the output scaled size - integer overflow"));
        goto done;
      }

      w = (guint) gst_util_uint64_scale_int (set_h, num, den);
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "width", w);
      gst_structure_get_int (tmp, "width", &tmp2);
      gst_structure_free (tmp);

      if (tmp2 == w) {
        gst_structure_set (outs, "width", G_TYPE_INT, tmp2, "height",
            G_TYPE_INT, set_h, NULL);
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* ... or try the same with the height */
      h = (guint) gst_util_uint64_scale_int (set_w, den, num);
      tmp = gst_structure_copy (outs);
      gst_structure_fixate_field_nearest_int (tmp, "height", h);
      gst_structure_get_int (tmp, "height", &tmp2);
      gst_structure_free (tmp);

      if (tmp2 == h) {
        gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
            G_TYPE_INT, tmp2, NULL);
        if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
            set_par_n != set_par_d)
          gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
              set_par_n, set_par_d, NULL);
        goto done;
      }

      /* If all fails we can't keep the DAR and take the nearest values
       * for everything from the first try */
      gst_structure_set (outs, "width", G_TYPE_INT, set_w, "height",
          G_TYPE_INT, set_h, NULL);
      if (gst_structure_has_field (outs, "pixel-aspect-ratio") ||
          set_par_n != set_par_d)
        gst_structure_set (outs, "pixel-aspect-ratio", GST_TYPE_FRACTION,
            set_par_n, set_par_d, NULL);
    }
  }

done:
  GST_DEBUG_OBJECT (base, "fixated othercaps to %" GST_PTR_FORMAT, othercaps);

  /* only unset the GValues we initialized ourselves */
  if (from_par == &fpar)
    g_value_unset (&fpar);
  if (to_par == &tpar)
    g_value_unset (&tpar);

  return othercaps;
}
/* Configure the kernel-streaming video device for the given fixed @caps.
 *
 * Passing @caps == NULL resets the device.  Otherwise the caps are
 * validated against the cached device caps, the matching media type is
 * fixated, and a new pin is created (re-using the existing pin if the
 * media type is unchanged).  On success the new state (media type,
 * dimensions, framerate, pin handle, fixed caps) is committed to @priv.
 *
 * Returns TRUE on success, FALSE on failure (old pin is re-created on a
 * pin-creation failure so the device stays usable).
 */
gboolean
gst_ks_video_device_set_caps (GstKsVideoDevice * self, GstCaps * caps)
{
  GstKsVideoDevicePrivate *priv = GST_KS_VIDEO_DEVICE_GET_PRIVATE (self);
  GList *cur;
  GstStructure *s;

  /* State to be committed on success */
  KsVideoMediaType *media_type = NULL;
  /* FIX: these must be gint — gst_structure_get_int() and
   * gst_structure_get_fraction() take gint *, and passing guint *
   * was an incompatible-pointer-type bug. */
  gint width, height, fps_n, fps_d;
  HANDLE pin_handle = INVALID_HANDLE_VALUE;

  /* Reset? */
  if (caps == NULL) {
    gst_ks_video_device_reset_caps (self);
    return TRUE;
  }

  /* Validate the caps */
  if (!gst_caps_is_subset (caps, priv->cached_caps)) {
    gchar *string_caps = gst_caps_to_string (caps);
    gchar *string_c_caps = gst_caps_to_string (priv->cached_caps);

    GST_ERROR ("caps (%s) is not a subset of device caps (%s)",
        string_caps, string_c_caps);

    g_free (string_caps);
    g_free (string_c_caps);

    goto error;
  }

  /* Find the device media type matching these caps */
  for (cur = priv->media_types; cur != NULL; cur = cur->next) {
    KsVideoMediaType *mt = cur->data;

    if (gst_caps_is_subset (caps, mt->translated_caps)) {
      media_type = ks_video_media_type_dup (mt);
      break;
    }
  }

  if (media_type == NULL)
    goto error;

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "width", &width) ||
      !gst_structure_get_int (s, "height", &height) ||
      !gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    GST_ERROR ("Failed to get width/height/fps");
    goto error;
  }

  if (!ks_video_fixate_media_type (media_type->range,
          media_type->format, width, height, fps_n, fps_d))
    goto error;

  if (priv->cur_media_type != NULL) {
    if (media_type->format_size == priv->cur_media_type->format_size &&
        memcmp (media_type->format, priv->cur_media_type->format,
            priv->cur_media_type->format_size) == 0) {
      GST_DEBUG ("%s: re-using existing pin", G_STRFUNC);
      goto same_caps;
    } else {
      GST_DEBUG ("%s: re-creating pin", G_STRFUNC);
    }
  }

  gst_ks_video_device_close_current_pin (self);

  pin_handle = gst_ks_video_device_create_pin (self, media_type,
      &priv->num_requests);
  if (!ks_is_valid_handle (pin_handle)) {
    /* Re-create the old pin so the device keeps working */
    if (priv->cur_media_type != NULL)
      priv->pin_handle = gst_ks_video_device_create_pin (self,
          priv->cur_media_type, &priv->num_requests);
    goto error;
  }

  /* Commit state: no turning back past this */
  gst_ks_video_device_reset_caps (self);

  priv->cur_media_type = media_type;
  priv->width = width;
  priv->height = height;
  priv->fps_n = fps_n;
  priv->fps_d = fps_d;

  /* raw RGB needs a per-row swap buffer (one scanline) */
  if (gst_structure_has_name (s, "video/x-raw-rgb"))
    priv->rgb_swap_buf = g_malloc (media_type->sample_size / priv->height);
  else
    priv->rgb_swap_buf = NULL;

  priv->is_mjpeg = gst_structure_has_name (s, "image/jpeg");

  priv->pin_handle = pin_handle;

  priv->cur_fixed_caps = gst_caps_copy (caps);

  return TRUE;

error:
  {
    /* ks_video_media_type_free() tolerates NULL */
    ks_video_media_type_free (media_type);
    return FALSE;
  }
same_caps:
  {
    ks_video_media_type_free (media_type);
    return TRUE;
  }
}
/**
 * gst_video_info_from_caps:
 * @info: a #GstVideoInfo
 * @caps: a #GstCaps
 *
 * Parse @caps and update @info.
 *
 * @caps must be fixed. Raw caps ("video/x-raw") require a "format",
 * "width" and "height"; other "video/..." or "image/..." caps are
 * treated as %GST_VIDEO_FORMAT_ENCODED and may omit dimensions.
 * Missing optional fields fall back to defaults (framerate 0/1,
 * PAR 1/1, progressive, no multiview, default colorimetry).
 *
 * Returns: TRUE if @caps could be parsed
 */
gboolean
gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
{
  GstStructure *structure;
  const gchar *s;
  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
  gint width = 0, height = 0;
  gint fps_n, fps_d;
  gint par_n, par_d;

  g_return_val_if_fail (info != NULL, FALSE);
  g_return_val_if_fail (caps != NULL, FALSE);
  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  GST_DEBUG ("parsing caps %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  if (gst_structure_has_name (structure, "video/x-raw")) {
    if (!(s = gst_structure_get_string (structure, "format")))
      goto no_format;

    format = gst_video_format_from_string (s);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
      goto unknown_format;

  } else if (g_str_has_prefix (gst_structure_get_name (structure), "video/") ||
      g_str_has_prefix (gst_structure_get_name (structure), "image/")) {
    /* any non-raw video/image media type is treated as opaque encoded data */
    format = GST_VIDEO_FORMAT_ENCODED;
  } else {
    goto wrong_name;
  }

  /* width and height are mandatory, except for non-raw-formats */
  if (!gst_structure_get_int (structure, "width", &width) &&
      format != GST_VIDEO_FORMAT_ENCODED)
    goto no_width;
  if (!gst_structure_get_int (structure, "height", &height) &&
      format != GST_VIDEO_FORMAT_ENCODED)
    goto no_height;

  /* reset @info to defaults before filling it in */
  gst_video_info_init (info);

  info->finfo = gst_video_format_get_info (format);
  info->width = width;
  info->height = height;

  if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
    if (fps_n == 0) {
      /* variable framerate */
      info->flags |= GST_VIDEO_FLAG_VARIABLE_FPS;
      /* see if we have a max-framerate */
      gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
    }
    info->fps_n = fps_n;
    info->fps_d = fps_d;
  } else {
    /* unspecified is variable framerate */
    info->fps_n = 0;
    info->fps_d = 1;
  }

  if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
          &par_n, &par_d)) {
    info->par_n = par_n;
    info->par_d = par_d;
  } else {
    /* missing PAR means square pixels */
    info->par_n = 1;
    info->par_d = 1;
  }

  if ((s = gst_structure_get_string (structure, "interlace-mode")))
    info->interlace_mode = gst_video_interlace_mode_from_string (s);
  else
    info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  /* multiview (stereo/3D) fields */
  {
    if ((s = gst_structure_get_string (structure, "multiview-mode")))
      GST_VIDEO_INFO_MULTIVIEW_MODE (info) =
          gst_video_multiview_mode_from_caps_string (s);
    else
      GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE;

    gst_structure_get_flagset (structure, "multiview-flags",
        &GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), NULL);

    if (!gst_structure_get_int (structure, "views", &info->views))
      info->views = 1;

    /* At one point, I tried normalising the half-aspect flag here,
     * but it behaves weird for GstVideoInfo operations other than
     * directly converting to/from caps - sometimes causing the
     * PAR to be doubled/halved too many times */
  }

  if ((s = gst_structure_get_string (structure, "chroma-site")))
    info->chroma_site = gst_video_chroma_from_string (s);
  else
    info->chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN;

  if ((s = gst_structure_get_string (structure, "colorimetry"))) {
    if (!gst_video_colorimetry_from_string (&info->colorimetry, s)) {
      GST_WARNING ("unparsable colorimetry, using default");
      set_default_colorimetry (info);
    } else if (!validate_colorimetry (info)) {
      GST_WARNING ("invalid colorimetry, using default");
      set_default_colorimetry (info);
    } else {
      /* force RGB matrix for RGB formats */
      if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) &&
          info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
        GST_WARNING ("invalid matrix %d for RGB format, using RGB",
            info->colorimetry.matrix);
        info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
      }
    }
  } else {
    GST_DEBUG ("no colorimetry, using default");
    set_default_colorimetry (info);
  }

  /* derive per-plane strides/offsets from format and dimensions */
  fill_planes (info);

  return TRUE;

  /* ERROR */
wrong_name:
  {
    GST_ERROR ("wrong name '%s', expected video/ or image/",
        gst_structure_get_name (structure));
    return FALSE;
  }
no_format:
  {
    GST_ERROR ("no format given");
    return FALSE;
  }
unknown_format:
  {
    GST_ERROR ("unknown format '%s' given", s);
    return FALSE;
  }
no_width:
  {
    GST_ERROR ("no width property given");
    return FALSE;
  }
no_height:
  {
    GST_ERROR ("no height property given");
    return FALSE;
  }
}
/* Sink setcaps handler for the Theora encoder.
 *
 * Reads width/height/framerate/PAR from @caps, (re)initializes the
 * theora_info on @enc accordingly (rounding the encoded size up to a
 * multiple of 16, with an optional centered visible area), applies the
 * element's encoding properties, and resets the encoder.
 *
 * Always returns TRUE.
 * NOTE(review): the width/height/framerate getters' return values are
 * ignored — presumably the pad template guarantees these fields;
 * confirm against the caps template.
 */
static gboolean
theora_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstTheoraEnc *enc = GST_THEORA_ENC (gst_pad_get_parent (pad));
  const GValue *par;
  /* FIX: initialize so a missing "framerate" field no longer leaves
   * these uninitialized (reading them was undefined behaviour);
   * 0/1 denotes "unspecified" framerate. */
  gint fps_n = 0, fps_d = 1;

  gst_structure_get_int (structure, "width", &enc->width);
  gst_structure_get_int (structure, "height", &enc->height);
  gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  theora_info_clear (&enc->info);
  theora_info_init (&enc->info);
  /* Theora has a divisible-by-sixteen restriction for the encoded video size but
   * we can define a visible area using the frame_width/frame_height */
  enc->info_width = enc->info.width = (enc->width + 15) & ~15;
  enc->info_height = enc->info.height = (enc->height + 15) & ~15;
  enc->info.frame_width = enc->width;
  enc->info.frame_height = enc->height;

  /* center image if needed */
  if (enc->center) {
    /* make sure offset is even, for easier decoding */
    enc->offset_x = GST_ROUND_UP_2 ((enc->info_width - enc->width) / 2);
    enc->offset_y = GST_ROUND_UP_2 ((enc->info_height - enc->height) / 2);
  } else {
    enc->offset_x = 0;
    enc->offset_y = 0;
  }
  enc->info.offset_x = enc->offset_x;
  enc->info.offset_y = enc->offset_y;

  enc->info.fps_numerator = enc->fps_n = fps_n;
  enc->info.fps_denominator = enc->fps_d = fps_d;
  if (par) {
    enc->info.aspect_numerator = gst_value_get_fraction_numerator (par);
    enc->info.aspect_denominator = gst_value_get_fraction_denominator (par);
  } else {
    /* setting them to 0 indicates that the decoder can chose a good aspect
     * ratio, defaulting to 1/1 */
    enc->info.aspect_numerator = 0;
    enc->info.aspect_denominator = 0;
  }

  enc->info.colorspace = OC_CS_UNSPECIFIED;
  enc->info.target_bitrate = enc->video_bitrate;
  enc->info.quality = enc->video_quality;

  enc->info.dropframes_p = 0;
  enc->info.quick_p = (enc->quick ? 1 : 0);
  enc->info.keyframe_auto_p = (enc->keyframe_auto ? 1 : 0);
  enc->info.keyframe_frequency = enc->keyframe_freq;
  enc->info.keyframe_frequency_force = enc->keyframe_force;
  enc->info.keyframe_data_target_bitrate = enc->video_bitrate * 1.5;
  enc->info.keyframe_auto_threshold = enc->keyframe_threshold;
  enc->info.keyframe_mindistance = enc->keyframe_mindistance;
  enc->info.noise_sensitivity = enc->noise_sensitivity;
  enc->info.sharpness = enc->sharpness;

  /* as done in theora */
  enc->granule_shift = _ilog (enc->info.keyframe_frequency_force - 1);
  GST_DEBUG_OBJECT (enc,
      "keyframe_frequency_force is %d, granule shift is %d",
      enc->info.keyframe_frequency_force, enc->granule_shift);

  theora_enc_reset (enc);
  enc->initialised = TRUE;

  /* release the ref taken by gst_pad_get_parent() */
  gst_object_unref (enc);

  return TRUE;
}