/* GstBaseSrc start vmethod: translate element properties into DMAI capture
 * attributes and pre-compute the per-buffer duration from the framerate. */
static gboolean
start (GstBaseSrc * base)
{
  GstTICaptureSrc *src = (GstTICaptureSrc *) base;
  gint fps_n, fps_d;

  GST_LOG ("start begin");

  src->offset = 0;

  /* Copy the configured properties into the DMAI capture attributes. */
  src->cAttrs.numBufs = src->numbufs;
  src->cAttrs.captureDevice = src->device;
  src->cAttrs.videoInput = dmai_capture_input (src->capture_input);
  src->cAttrs.videoStd = dmai_video_std (src->video_standard);

  fps_n = gst_value_get_fraction_numerator (&src->framerate);
  fps_d = gst_value_get_fraction_denominator (&src->framerate);

  /* if we have a framerate pre-calculate duration */
  if (fps_n > 0 && fps_d > 0)
    src->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
  else
    src->duration = GST_CLOCK_TIME_NONE;

  GST_LOG ("start end");
  return TRUE;
}
/* Test helper: check that the EXIF ShutterSpeedValue entry (stored as an
 * APEX value) matches the shutter-speed fraction in the GStreamer tag list. */
static void
compare_shutter_speed (ExifEntry * entry, ExifTagCheckData * testdata)
{
  const GValue *tag_value;
  GValue fraction = { 0 };
  ExifSRational srational;
  gdouble from_tags, from_exif;

  tag_value = gst_tag_list_get_value_index (testdata->taglist,
      GST_TAG_CAPTURING_SHUTTER_SPEED, 0);
  if (tag_value == NULL) {
    GST_WARNING ("Failed to get shutter-speed from taglist");
    return;
  }

  srational = exif_get_srational (entry->data,
      exif_data_get_byte_order (entry->parent->parent));

  g_value_init (&fraction, GST_TYPE_FRACTION);
  gst_value_set_fraction (&fraction, srational.numerator,
      srational.denominator);
  gst_util_fraction_to_double (gst_value_get_fraction_numerator (&fraction),
      gst_value_get_fraction_denominator (&fraction), &from_exif);
  g_value_unset (&fraction);

  gst_util_fraction_to_double (gst_value_get_fraction_numerator (tag_value),
      gst_value_get_fraction_denominator (tag_value), &from_tags);

  /* EXIF stores the shutter speed in APEX units: time = 2^-APEX. */
  from_exif = pow (2, -from_exif);

  GST_LOG ("Shutter speed in gst=%lf and in exif=%lf", from_tags, from_exif);
  fail_unless (ABS (from_tags - from_exif) < 0.001);
  testdata->result = TRUE;
}
static void get_supported_framerates (ofGstVideoFormat &video_format, GstStructure &structure) { const GValue *framerates; int i, j; framerates = gst_structure_get_value (&structure, "framerate"); if (GST_VALUE_HOLDS_FRACTION (framerates)) { video_format.num_framerates = 1; video_format.framerates = new ofGstFramerate[video_format.num_framerates]; video_format.framerates[0].numerator = gst_value_get_fraction_numerator (framerates); video_format.framerates[0].denominator = gst_value_get_fraction_denominator (framerates); } else if (GST_VALUE_HOLDS_LIST (framerates)) { video_format.num_framerates = gst_value_list_get_size (framerates); video_format.framerates = new ofGstFramerate[video_format.num_framerates]; for (i = 0; i < video_format.num_framerates; i++) { const GValue *value; value = gst_value_list_get_value (framerates, i); video_format.framerates[i].numerator = gst_value_get_fraction_numerator (value); video_format.framerates[i].denominator = gst_value_get_fraction_denominator (value); } } else if (GST_VALUE_HOLDS_FRACTION_RANGE (framerates)) { int numerator_min, denominator_min, numerator_max, denominator_max; const GValue *fraction_range_min; const GValue *fraction_range_max; fraction_range_min = gst_value_get_fraction_range_min (framerates); numerator_min = gst_value_get_fraction_numerator (fraction_range_min); denominator_min = gst_value_get_fraction_denominator (fraction_range_min); fraction_range_max = gst_value_get_fraction_range_max (framerates); numerator_max = gst_value_get_fraction_numerator (fraction_range_max); denominator_max = gst_value_get_fraction_denominator (fraction_range_max); g_print ("FractionRange: %d/%d - %d/%d\n", numerator_min, denominator_min, numerator_max, denominator_max); video_format.num_framerates = (numerator_max - numerator_min + 1) * (denominator_max - denominator_min + 1); video_format.framerates = new ofGstFramerate[video_format.num_framerates]; int k = 0; for (i = numerator_min; i <= numerator_max; i++) { for (j = 
denominator_min; j <= denominator_max; j++) { video_format.framerates[k].numerator = i; video_format.framerates[k].denominator = j; k++; } } } else { g_critical ("GValue type %s, cannot be handled for framerates", G_VALUE_TYPE_NAME (framerates)); } }
std::string VideoV4lSource::srcCaps(unsigned int framerateIndex) const { std::ostringstream capsStr; GstStateChangeReturn ret = gst_element_set_state(source_, GST_STATE_READY); if (ret not_eq GST_STATE_CHANGE_SUCCESS) THROW_ERROR("Could not change v4l2src state to READY"); GstPad *srcPad = gst_element_get_static_pad(source_, "src"); GstCaps *caps = gst_pad_get_caps(srcPad); GstStructure *structure = gst_caps_get_structure(caps, 0); const GValue *val = gst_structure_get_value(structure, "framerate"); LOG_DEBUG("Caps structure from v4l2src srcpad: " << gst_structure_to_string(structure)); gint framerate_numerator, framerate_denominator; if (GST_VALUE_HOLDS_LIST(val)) { // trying another one if (framerateIndex >= gst_value_list_get_size(val)) THROW_ERROR("Framerate index out of range"); framerate_numerator = gst_value_get_fraction_numerator((gst_value_list_get_value(val, framerateIndex))); framerate_denominator = gst_value_get_fraction_denominator((gst_value_list_get_value(val, framerateIndex))); } else { // FIXME: this is really bad, we should be iterating over framerates and resolutions until we find a good one if (framerateIndex > 0) LOG_ERROR("Caps parameters haven't been changed and have failed before"); framerate_numerator = gst_value_get_fraction_numerator(val); framerate_denominator = gst_value_get_fraction_denominator(val); } gst_caps_unref(caps); gst_object_unref(srcPad); // use default from gst std::string capsSuffix = boost::lexical_cast<std::string>(framerate_numerator); capsSuffix += "/"; capsSuffix += boost::lexical_cast<std::string>(framerate_denominator); if (v4l2util::isInterlaced(deviceStr())) capsSuffix +=", interlaced=true"; capsSuffix += ", pixel-aspect-ratio="; capsSuffix += config_.pixelAspectRatio(); capsStr << "video/x-raw-yuv, width=" << config_.captureWidth() << ", height=" << config_.captureHeight() << ", framerate=" << capsSuffix; LOG_DEBUG("V4l2src caps are " << capsStr.str()); ret = gst_element_set_state(source_, GST_STATE_NULL); if 
(ret not_eq GST_STATE_CHANGE_SUCCESS) THROW_ERROR("Could not change v4l2src state to NULL"); return capsStr.str(); }
/* Compute the smallest and largest fraction representable by @v, which may
 * hold a single fraction, a fraction range, or a list of fractions/ranges.
 * Results are written through the four out-parameters as num/denom pairs.
 * Unknown types fall back to the widest possible range (0/1 .. G_MAXINT/1)
 * after emitting a warning. */
static void
gst_value_fraction_get_extremes (const GValue * v,
    gint * min_num, gint * min_denom, gint * max_num, gint * max_denom)
{
  if (GST_VALUE_HOLDS_FRACTION (v)) {
    /* A single fraction is both the minimum and the maximum. */
    *min_num = *max_num = gst_value_get_fraction_numerator (v);
    *min_denom = *max_denom = gst_value_get_fraction_denominator (v);
  } else if (GST_VALUE_HOLDS_FRACTION_RANGE (v)) {
    const GValue *min, *max;

    min = gst_value_get_fraction_range_min (v);
    *min_num = gst_value_get_fraction_numerator (min);
    *min_denom = gst_value_get_fraction_denominator (min);

    max = gst_value_get_fraction_range_max (v);
    *max_num = gst_value_get_fraction_numerator (max);
    *max_denom = gst_value_get_fraction_denominator (max);
  } else if (GST_VALUE_HOLDS_LIST (v)) {
    gint min_n = G_MAXINT, min_d = 1, max_n = 0, max_d = 1;
    int i, n;

    /* Start from an "empty" interval (min = G_MAXINT/1, max = 0/1) and
     * widen it with the extremes of each list entry. */
    *min_num = G_MAXINT;
    *min_denom = 1;
    *max_num = 0;
    *max_denom = 1;

    n = gst_value_list_get_size (v);

    g_assert (n > 0);

    for (i = 0; i < n; i++) {
      const GValue *t = gst_value_list_get_value (v, i);

      /* Recurse: list entries may themselves be fractions or ranges. */
      gst_value_fraction_get_extremes (t, &min_n, &min_d, &max_n, &max_d);
      if (gst_util_fraction_compare (min_n, min_d, *min_num, *min_denom) < 0) {
        *min_num = min_n;
        *min_denom = min_d;
      }

      if (gst_util_fraction_compare (max_n, max_d, *max_num, *max_denom) > 0) {
        *max_num = max_n;
        *max_denom = max_d;
      }
    }
  } else {
    /* Unknown GValue type: assume the widest possible range. */
    g_warning ("Unknown type for framerate");
    *min_num = 0;
    *min_denom = 1;
    *max_num = G_MAXINT;
    *max_denom = 1;
  }
}
static void get_supported_framerates (ofGstVideoFormat &video_format, GstStructure &structure) { const GValue *framerates; ofGstFramerate framerate; framerates = gst_structure_get_value (&structure, "framerate"); if (GST_VALUE_HOLDS_FRACTION (framerates)){ framerate.numerator = gst_value_get_fraction_numerator (framerates); framerate.denominator = gst_value_get_fraction_denominator (framerates); video_format.framerates.push_back(framerate); ofLog(OF_LOG_NOTICE,"%d/%d ", framerate.numerator, framerate.denominator); }else if (GST_VALUE_HOLDS_LIST (framerates)){ int num_framerates = gst_value_list_get_size (framerates); for (int i = 0; i < num_framerates; i++){ const GValue *value = gst_value_list_get_value (framerates, i); framerate.numerator = gst_value_get_fraction_numerator (value); framerate.denominator = gst_value_get_fraction_denominator (value); video_format.framerates.push_back(framerate); ofLog(OF_LOG_NOTICE,"%d/%d ", framerate.numerator, framerate.denominator); } }else if (GST_VALUE_HOLDS_FRACTION_RANGE (framerates)){ int numerator_min, denominator_min, numerator_max, denominator_max; const GValue *fraction_range_min; const GValue *fraction_range_max; fraction_range_min = gst_value_get_fraction_range_min (framerates); numerator_min = gst_value_get_fraction_numerator (fraction_range_min); denominator_min = gst_value_get_fraction_denominator (fraction_range_min); fraction_range_max = gst_value_get_fraction_range_max (framerates); numerator_max = gst_value_get_fraction_numerator (fraction_range_max); denominator_max = gst_value_get_fraction_denominator (fraction_range_max); ofLog(OF_LOG_NOTICE,"from %d/%d to %d/%d", numerator_min, denominator_max, numerator_max, denominator_min); for (int i = numerator_min; i <= numerator_max; i++){ for (int j = denominator_min; j <= denominator_max; j++){ framerate.numerator = i; framerate.denominator = j; video_format.framerates.push_back(framerate); } } }else{ ofLog (OF_LOG_WARNING,"unknown GValue type %s for framerates", 
G_VALUE_TYPE_NAME (framerates)); } }
/* Set-caps handler: compute the capture rectangle from the configured
 * sub-region and store the negotiated framerate on the source. */
static gboolean
gst_dx9screencapsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstDX9ScreenCapSrc *src = GST_DX9SCREENCAPSRC (bsrc);
  GstStructure *s = gst_caps_get_structure (caps, 0);
  const GValue *fps;

  /* Start from the whole screen, then narrow to the configured region. */
  src->src_rect = src->screen_rect;
  if (src->capture_w && src->capture_h) {
    src->src_rect.left += src->capture_x;
    src->src_rect.top += src->capture_y;
    src->src_rect.right = src->src_rect.left + src->capture_w;
    src->src_rect.bottom = src->src_rect.top + src->capture_h;
  }

  fps = gst_structure_get_value (s, "framerate");
  if (fps != NULL) {
    src->rate_numerator = gst_value_get_fraction_numerator (fps);
    src->rate_denominator = gst_value_get_fraction_denominator (fps);
  }

  GST_DEBUG_OBJECT (src, "size %dx%d, %d/%d fps",
      (gint) (src->src_rect.right - src->src_rect.left),
      (gint) (src->src_rect.bottom - src->src_rect.top),
      src->rate_numerator, src->rate_denominator);

  return TRUE;
}
/* Set-caps handler for gdiscreencapsrc: compute the capture rectangle,
 * pick up the negotiated framerate and (re)create the GDI DIB section
 * used to grab screen frames. */
static gboolean
gst_gdiscreencapsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstGDIScreenCapSrc *src = GST_GDISCREENCAPSRC (bsrc);
  HWND capture;
  HDC device;
  GstStructure *structure;
  const GValue *framerate;

  structure = gst_caps_get_structure (caps, 0);

  /* Start from the whole screen, then narrow to the configured region. */
  src->src_rect = src->screen_rect;
  if (src->capture_w && src->capture_h) {
    src->src_rect.left += src->capture_x;
    src->src_rect.top += src->capture_y;
    src->src_rect.right = src->src_rect.left + src->capture_w;
    src->src_rect.bottom = src->src_rect.top + src->capture_h;
  }

  framerate = gst_structure_get_value (structure, "framerate");
  if (framerate) {
    src->rate_numerator = gst_value_get_fraction_numerator (framerate);
    src->rate_denominator = gst_value_get_fraction_denominator (framerate);
  }

  /* biHeight = top - bottom is negative, which requests a top-down DIB. */
  src->info.bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
  src->info.bmiHeader.biWidth = src->src_rect.right - src->src_rect.left;
  src->info.bmiHeader.biHeight = src->src_rect.top - src->src_rect.bottom;
  src->info.bmiHeader.biPlanes = 1;
  src->info.bmiHeader.biBitCount = 24;
  src->info.bmiHeader.biCompression = BI_RGB;
  src->info.bmiHeader.biSizeImage = 0;
  src->info.bmiHeader.biXPelsPerMeter = 0;
  src->info.bmiHeader.biYPelsPerMeter = 0;
  src->info.bmiHeader.biClrUsed = 0;
  src->info.bmiHeader.biClrImportant = 0;

  /* Cleanup first */
  if (src->hBitmap != INVALID_HANDLE_VALUE)
    DeleteObject (src->hBitmap);
  if (src->memDC != INVALID_HANDLE_VALUE)
    DeleteDC (src->memDC);

  /* Allocate */
  capture = GetDesktopWindow ();
  device = GetDC (capture);
  src->hBitmap = CreateDIBSection (device, &(src->info), DIB_RGB_COLORS,
      (void **) &(src->dibMem), 0, 0);
  src->memDC = CreateCompatibleDC (device);
  SelectObject (src->memDC, src->hBitmap);
  ReleaseDC (capture, device);

  GST_DEBUG_OBJECT (src, "size %dx%d, %d/%d fps",
      (gint) src->info.bmiHeader.biWidth,
      (gint) (-src->info.bmiHeader.biHeight),
      src->rate_numerator, src->rate_denominator);

  return TRUE;
}
/* Test helper: check the EXIF ApertureValue entry (APEX units) against the
 * capturing-focal-ratio tag from the GStreamer tag list. */
static void
compare_aperture_value (ExifEntry * entry, ExifTagCheckData * testdata)
{
  ExifSRational srational;
  GValue fraction = { 0 };
  gdouble tag_ratio, exif_ratio;

  if (!gst_tag_list_get_double_index (testdata->taglist,
          GST_TAG_CAPTURING_FOCAL_RATIO, 0, &tag_ratio)) {
    GST_WARNING ("Failed to get focal ratio from taglist");
    return;
  }

  srational = exif_get_srational (entry->data,
      exif_data_get_byte_order (entry->parent->parent));

  g_value_init (&fraction, GST_TYPE_FRACTION);
  gst_value_set_fraction (&fraction, srational.numerator,
      srational.denominator);
  gst_util_fraction_to_double (gst_value_get_fraction_numerator (&fraction),
      gst_value_get_fraction_denominator (&fraction), &exif_ratio);
  g_value_unset (&fraction);

  /* EXIF stores the aperture in APEX units: f-number = 2^(APEX/2). */
  exif_ratio = pow (2, exif_ratio / 2);

  GST_LOG ("Aperture value in gst=%lf and in exif=%lf", tag_ratio, exif_ratio);
  fail_unless (ABS (tag_ratio - exif_ratio) < 0.001);
  testdata->result = TRUE;
}
/* GObject set_property: update the target aspect ratio and, when the sink
 * pad already has caps, re-run caps handling with the new ratio. */
static void
gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAspectRatioCrop *crop = GST_ASPECT_RATIO_CROP (object);
  gboolean recheck = FALSE;

  GST_OBJECT_LOCK (crop);
  switch (prop_id) {
    case ARG_ASPECT_RATIO_CROP:
      if (GST_VALUE_HOLDS_FRACTION (value)) {
        crop->ar_num = gst_value_get_fraction_numerator (value);
        crop->ar_denom = gst_value_get_fraction_denominator (value);
        /* Only re-apply when the sink pad is already negotiated. */
        recheck = (GST_PAD_CAPS (crop->sink) != NULL);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (crop);

  /* Re-apply the current sink caps after releasing the object lock. */
  if (recheck)
    gst_aspect_ratio_crop_set_caps (crop->sink, GST_PAD_CAPS (crop->sink));
}
/* Parse width, height, framerate, vmode and bpp out of fixed caps.
 * Returns TRUE only when every field was present. */
static gboolean
gst_dc1394_parse_caps (const GstCaps * caps, gint * width, gint * height,
    gint * rate_numerator, gint * rate_denominator, gint * vmode, gint * bpp)
{
  const GstStructure *structure;
  gboolean ret;                 /* was mistyped as GstPadLinkReturn */
  const GValue *framerate;

  if (gst_caps_get_size (caps) < 1)
    return FALSE;

  structure = gst_caps_get_structure (caps, 0);

  ret = gst_structure_get_int (structure, "width", width);
  ret &= gst_structure_get_int (structure, "height", height);
  ret &= gst_structure_get_int (structure, "vmode", vmode);
  ret &= gst_structure_get_int (structure, "bpp", bpp);

  /* framerate is a fraction, so it cannot go through get_int. */
  framerate = gst_structure_get_value (structure, "framerate");
  if (framerate) {
    *rate_numerator = gst_value_get_fraction_numerator (framerate);
    *rate_denominator = gst_value_get_fraction_denominator (framerate);
  } else {
    ret = FALSE;
  }

  return ret;
}
static void addTagToMap(const GstTagList *list, const gchar *tag, gpointer user_data) { QMap<QByteArray, QVariant> *map = reinterpret_cast<QMap<QByteArray, QVariant>* >(user_data); GValue val; val.g_type = 0; gst_tag_list_copy_value(&val,list,tag); switch( G_VALUE_TYPE(&val) ) { case G_TYPE_STRING: { const gchar *str_value = g_value_get_string(&val); map->insert(QByteArray(tag), QString::fromUtf8(str_value)); break; } case G_TYPE_INT: map->insert(QByteArray(tag), g_value_get_int(&val)); break; case G_TYPE_UINT: map->insert(QByteArray(tag), g_value_get_uint(&val)); break; case G_TYPE_LONG: map->insert(QByteArray(tag), qint64(g_value_get_long(&val))); break; case G_TYPE_BOOLEAN: map->insert(QByteArray(tag), g_value_get_boolean(&val)); break; case G_TYPE_CHAR: map->insert(QByteArray(tag), g_value_get_char(&val)); break; case G_TYPE_DOUBLE: map->insert(QByteArray(tag), g_value_get_double(&val)); break; default: // GST_TYPE_DATE is a function, not a constant, so pull it out of the switch if (G_VALUE_TYPE(&val) == GST_TYPE_DATE) { const GDate *date = gst_value_get_date(&val); if (g_date_valid(date)) { int year = g_date_get_year(date); int month = g_date_get_month(date); int day = g_date_get_day(date); map->insert(QByteArray(tag), QDate(year,month,day)); if (!map->contains("year")) map->insert("year", year); } } else if (G_VALUE_TYPE(&val) == GST_TYPE_FRACTION) { int nom = gst_value_get_fraction_numerator(&val); int denom = gst_value_get_fraction_denominator(&val); if (denom > 0) { map->insert(QByteArray(tag), double(nom)/denom); } } break; } g_value_unset(&val); }
/* Convert a GST_TYPE_FRACTION GValue into a Ruby Rational object. */
static VALUE
fraction_gvalue2rvalue(const GValue *value)
{
    VALUE numerator = INT2NUM(gst_value_get_fraction_numerator(value));
    VALUE denominator = INT2NUM(gst_value_get_fraction_denominator(value));

    return rb_funcall(Qnil, rb_intern("Rational"), 2, numerator, denominator);
}
// Allocate the player's video buffers once the pipeline has negotiated caps.
// Queries width/height and framerate from the sink pad (the GStreamer 0.10
// and 1.x paths differ), derives the total frame count from the stream
// duration, and records the allocated state in bIsAllocated.
bool ofGstVideoPlayer::allocate(int bpp){
	if(bIsAllocated) return true;

	guint64 durationNanos = videoUtils.getDurationNanos();

	nFrames		  = 0;
	if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){
#if GST_VERSION_MAJOR==0
		// GStreamer 0.10: dedicated helpers read size and framerate off the pad.
		int width,height;
		if(gst_video_get_size(GST_PAD(pad), &width, &height)){
			// NOTE(review): the early returns below skip gst_object_unref(pad)
			// — pre-existing behavior, left unchanged here.
			if(!videoUtils.allocate(width,height,bpp)) return false;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
			return false;
		}

		const GValue *framerate = gst_video_frame_rate(pad);
		fps_n=0;
		fps_d=0;
		if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
			fps_n = gst_value_get_fraction_numerator (framerate);
			fps_d = gst_value_get_fraction_denominator (framerate);
			// total frames = duration in seconds * fps
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			ofLogVerbose("ofGstVideoPlayer") << "allocate(): framerate: " << fps_n << "/" << fps_d;
		}else{
			ofLogWarning("ofGstVideoPlayer") << "allocate(): cannot get framerate, frame seek won't work";
		}
		bIsAllocated = true;
#else
		// GStreamer 1.x: read everything from the pad's current caps.
		if(GstCaps *caps = gst_pad_get_current_caps (GST_PAD (pad))){
			GstVideoInfo info;
			gst_video_info_init (&info);
			if (gst_video_info_from_caps (&info, caps)){
				if(!videoUtils.allocate(info.width,info.height,bpp)) return false;
			}else{
				ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
				return false;
			}
			fps_n = info.fps_n;
			fps_d = info.fps_d;
			// total frames = duration in seconds * fps
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			gst_caps_unref(caps);
			bIsAllocated = true;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): cannot get pipeline caps";
			bIsAllocated = false;
		}
#endif
		gst_object_unref(GST_OBJECT(pad));
	}else{
		ofLogError("ofGstVideoPlayer") << "allocate(): cannot get sink pad";
		bIsAllocated = false;
	}
	return bIsAllocated;
}
/* Sink setcaps: record framerate and dimensions, require both dimensions
 * to be multiples of 16, resync the encoder and push matching smoke caps
 * on the source pad. */
static gboolean
gst_smokeenc_setcaps (GstPad * pad, GstCaps * caps)
{
  GstSmokeEnc *enc = GST_SMOKEENC (gst_pad_get_parent (pad));
  GstStructure *s = gst_caps_get_structure (caps, 0);
  const GValue *fps = gst_structure_get_value (s, "framerate");
  GstCaps *srccaps;
  gboolean ret;

  if (fps != NULL) {
    enc->fps_num = gst_value_get_fraction_numerator (fps);
    enc->fps_denom = gst_value_get_fraction_denominator (fps);
  } else {
    /* No framerate in the caps: fall back to 0/1. */
    enc->fps_num = 0;
    enc->fps_denom = 1;
  }

  gst_structure_get_int (s, "width", &enc->width);
  gst_structure_get_int (s, "height", &enc->height);

  if ((enc->width & 0x0f) != 0 || (enc->height & 0x0f) != 0)
    goto width_or_height_notx16;
  if (!gst_smokeenc_resync (enc))
    goto init_failed;

  srccaps = gst_caps_new_simple ("video/x-smoke",
      "width", G_TYPE_INT, enc->width,
      "height", G_TYPE_INT, enc->height,
      "framerate", GST_TYPE_FRACTION, enc->fps_num, enc->fps_denom, NULL);
  ret = gst_pad_set_caps (enc->srcpad, srccaps);
  gst_caps_unref (srccaps);

  gst_object_unref (enc);
  return ret;

  /* ERRORS */
width_or_height_notx16:
  {
    GST_WARNING_OBJECT (enc, "width and height must be multiples of 16"
        ", %dx%d not allowed", enc->width, enc->height);
    gst_object_unref (enc);
    return FALSE;
  }
init_failed:
  {
    GST_WARNING_OBJECT (enc, "could not init decoder");
    gst_object_unref (enc);
    return FALSE;
  }
}
/* GObject set_property for the VDPAU sink. */
static void
gst_vdp_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  VdpSink *vdp_sink;

  g_return_if_fail (GST_IS_VDP_SINK (object));

  vdp_sink = GST_VDP_SINK (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      /* Free the previously configured name; it was leaked before. */
      g_free (vdp_sink->display_name);
      vdp_sink->display_name = g_strdup (g_value_get_string (value));
      break;
    case PROP_SYNCHRONOUS:
      vdp_sink->synchronous = g_value_get_boolean (value);
      if (vdp_sink->device) {
        GST_DEBUG_OBJECT (vdp_sink, "XSynchronize called with %s",
            vdp_sink->synchronous ? "TRUE" : "FALSE");
        /* XSynchronize must be serialized with other Xlib calls. */
        g_mutex_lock (vdp_sink->x_lock);
        XSynchronize (vdp_sink->device->display, vdp_sink->synchronous);
        g_mutex_unlock (vdp_sink->x_lock);
      }
      break;
    case PROP_PIXEL_ASPECT_RATIO:
    {
      GValue *tmp;

      tmp = g_new0 (GValue, 1);
      g_value_init (tmp, GST_TYPE_FRACTION);

      /* The property may arrive as a string; transform it to a fraction. */
      if (!g_value_transform (value, tmp)) {
        GST_WARNING_OBJECT (vdp_sink,
            "Could not transform string to aspect ratio");
        g_free (tmp);
      } else {
        GST_DEBUG_OBJECT (vdp_sink, "set PAR to %d/%d",
            gst_value_get_fraction_numerator (tmp),
            gst_value_get_fraction_denominator (tmp));
        g_free (vdp_sink->par);
        vdp_sink->par = tmp;
      }
    }
      break;
    case PROP_HANDLE_EVENTS:
      gst_vdp_sink_set_event_handling (GST_X_OVERLAY (vdp_sink),
          g_value_get_boolean (value));
      break;
    case PROP_HANDLE_EXPOSE:
      vdp_sink->handle_expose = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Resize the preview window to match the negotiated caps, honoring both the
 * video's pixel aspect ratio and the display's, and optionally capping the
 * size at 80% of the screen. */
static void
caps_set(GstCaps *caps, signal_user_data_t *ud)
{
    GstStructure *ss;

    ss = gst_caps_get_structure(caps, 0);
    if (ss)
    {
        gint fps_n, fps_d, width, height;
        guint num, den, par_n, par_d;
        gint disp_par_n, disp_par_d;
        const GValue *par;

        gst_structure_get_fraction(ss, "framerate", &fps_n, &fps_d);
        gst_structure_get_int(ss, "width", &width);
        gst_structure_get_int(ss, "height", &height);

        // The pixel-aspect-ratio field may be absent; fall back to square
        // pixels instead of dereferencing a NULL GValue.
        par = gst_structure_get_value(ss, "pixel-aspect-ratio");
        if (par != NULL)
        {
            par_n = gst_value_get_fraction_numerator(par);
            par_d = gst_value_get_fraction_denominator(par);
        }
        else
        {
            par_n = par_d = 1;
        }
        ghb_screen_par(ud, &disp_par_n, &disp_par_d);
        gst_video_calculate_display_ratio(
            &num, &den, width, height, par_n, par_d, disp_par_n, disp_par_d);

        // Scale one dimension so the preview shows the display aspect ratio.
        if (par_n > par_d)
            width = gst_util_uint64_scale_int(height, num, den);
        else
            height = gst_util_uint64_scale_int(width, den, num);

        if (ghb_dict_get_bool(ud->prefs, "reduce_hd_preview"))
        {
            // Renamed from 'ss', which shadowed the caps structure above.
            GdkScreen *screen;
            gint s_w, s_h;

            screen = gdk_screen_get_default();
            s_w = gdk_screen_get_width(screen);
            s_h = gdk_screen_get_height(screen);
            // Cap the preview at 80% of the screen in either dimension,
            // preserving the display aspect ratio.
            if (width > s_w * 80 / 100)
            {
                width = s_w * 80 / 100;
                height = gst_util_uint64_scale_int(width, den, num);
            }
            if (height > s_h * 80 / 100)
            {
                height = s_h * 80 / 100;
                width = gst_util_uint64_scale_int(height, num, den);
            }
        }
        if (width != ud->preview->width || height != ud->preview->height)
        {
            preview_set_size(ud, width, height);
        }
    }
}
/* Sink setcaps for dvdec: framerate is mandatory, pixel-aspect-ratio is
 * optional (when absent, need_par is set so it is derived later). */
static gboolean
gst_dvdec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDVDec *dec = GST_DVDEC (gst_pad_get_parent (pad));
  GstStructure *s = gst_caps_get_structure (caps, 0);
  const GValue *rate;
  const GValue *par;

  /* we allow framerate and PAR to be overwritten. framerate is mandatory. */
  rate = gst_structure_get_value (s, "framerate");
  if (rate == NULL)
    goto no_framerate;

  par = gst_structure_get_value (s, "pixel-aspect-ratio");
  if (par != NULL) {
    dec->par_x = gst_value_get_fraction_numerator (par);
    dec->par_y = gst_value_get_fraction_denominator (par);
    dec->need_par = FALSE;
  } else {
    dec->par_x = 0;
    dec->par_y = 0;
    dec->need_par = TRUE;
  }
  dec->framerate_numerator = gst_value_get_fraction_numerator (rate);
  dec->framerate_denominator = gst_value_get_fraction_denominator (rate);

  dec->sink_negotiated = TRUE;
  dec->src_negotiated = FALSE;

  gst_object_unref (dec);
  return TRUE;

  /* ERRORS */
no_framerate:
  {
    GST_DEBUG_OBJECT (dec, "no framerate specified in caps");
    gst_object_unref (dec);
    return FALSE;
  }
}
EXPORT_C
#endif
/**
 * gst_video_calculate_display_ratio:
 * Compute the display aspect ratio dar_n/dar_d of a video as
 * (video_width * video_par_n * display_par_d) /
 * (video_height * video_par_d * display_par_n),
 * reduced via GstFraction arithmetic.
 * Returns FALSE on multiplication overflow or a non-positive result.
 */
gboolean
gst_video_calculate_display_ratio (guint * dar_n, guint * dar_d,
    guint video_width, guint video_height,
    guint video_par_n, guint video_par_d,
    guint display_par_n, guint display_par_d)
{
  gint num, den;

  GValue display_ratio = { 0, };
  GValue tmp = { 0, };
  GValue tmp2 = { 0, };

  g_return_val_if_fail (dar_n != NULL, FALSE);
  g_return_val_if_fail (dar_d != NULL, FALSE);

  g_value_init (&display_ratio, GST_TYPE_FRACTION);
  g_value_init (&tmp, GST_TYPE_FRACTION);
  g_value_init (&tmp2, GST_TYPE_FRACTION);

  /* Calculate (video_width * video_par_n * display_par_d) /
   * (video_height * video_par_d * display_par_n) */
  gst_value_set_fraction (&display_ratio, video_width, video_height);
  gst_value_set_fraction (&tmp, video_par_n, video_par_d);

  if (!gst_value_fraction_multiply (&tmp2, &display_ratio, &tmp))
    goto error_overflow;

  gst_value_set_fraction (&tmp, display_par_d, display_par_n);

  if (!gst_value_fraction_multiply (&display_ratio, &tmp2, &tmp))
    goto error_overflow;

  num = gst_value_get_fraction_numerator (&display_ratio);
  den = gst_value_get_fraction_denominator (&display_ratio);

  g_value_unset (&display_ratio);
  g_value_unset (&tmp);
  g_value_unset (&tmp2);

  g_return_val_if_fail (num > 0, FALSE);
  g_return_val_if_fail (den > 0, FALSE);

  *dar_n = num;
  *dar_d = den;

  return TRUE;

  /* fraction multiplication overflowed: release temporaries and bail out */
error_overflow:
  g_value_unset (&display_ratio);
  g_value_unset (&tmp);
  g_value_unset (&tmp2);
  return FALSE;
}
static gboolean gst_video_test_src_parse_caps (const GstCaps * caps, gint * width, gint * height, gint * fps_n, gint * fps_d, GstVideoColorimetry * colorimetry, gint * x_inv, gint * y_inv) { const GstStructure *structure; GstPadLinkReturn ret; const GValue *framerate; const gchar *str; GST_DEBUG ("parsing caps"); structure = gst_caps_get_structure (caps, 0); ret = gst_structure_get_int (structure, "width", width); ret &= gst_structure_get_int (structure, "height", height); framerate = gst_structure_get_value (structure, "framerate"); if (framerate) { *fps_n = gst_value_get_fraction_numerator (framerate); *fps_d = gst_value_get_fraction_denominator (framerate); } else goto no_framerate; if ((str = gst_structure_get_string (structure, "colorimetry"))) gst_video_colorimetry_from_string (colorimetry, str); if ((str = gst_structure_get_string (structure, "format"))) { if (g_str_equal (str, "bggr")) { *x_inv = *y_inv = 0; } else if (g_str_equal (str, "rggb")) { *x_inv = *y_inv = 1; } else if (g_str_equal (str, "grbg")) { *x_inv = 0; *y_inv = 1; } else if (g_str_equal (str, "grbg")) { *x_inv = 1; *y_inv = 0; } else goto invalid_format; } return ret; /* ERRORS */ no_framerate: { GST_DEBUG ("videotestsrc no framerate given"); return FALSE; } invalid_format: { GST_DEBUG ("videotestsrc invalid bayer format given"); return FALSE; } }
/* Sink pad setcaps: record the per-sink geometry and track the minimum
 * input framerate as the output framerate / buffer duration. */
static gboolean
sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure;
  GstOmxBaseFilter21 *self;
  GstVideoFormat format;
  int sink_number;

  self = GST_OMX_BASE_FILTER21 (GST_PAD_PARENT (pad));

  /* Map the pad name to an input index; reject unknown pads instead of
   * using an uninitialized index (previous behavior). */
  if (strcmp (GST_PAD_NAME (pad), "sink_00") == 0) {
    sink_number = 0;
  } else if (strcmp (GST_PAD_NAME (pad), "sink_01") == 0) {
    sink_number = 1;
  } else {
    GST_WARNING_OBJECT (self, "setcaps on unexpected pad %s",
        GST_PAD_NAME (pad));
    return FALSE;
  }

  GST_INFO_OBJECT (self, "setcaps (sink): %d", sink_number);
  GST_INFO_OBJECT (self, "setcaps (sink): %" GST_PTR_FORMAT, caps);

  g_return_val_if_fail (caps, FALSE);
  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  structure = gst_caps_get_structure (caps, 0);
  g_return_val_if_fail (structure, FALSE);

  if (!gst_video_format_parse_caps_strided (caps, &format,
          &self->in_width[sink_number], &self->in_height[sink_number],
          &self->in_stride[sink_number])) {
    GST_WARNING_OBJECT (self, "width and/or height is not set in caps");
    return FALSE;
  }

  if (!self->in_stride[sink_number]) {
    self->in_stride[sink_number] =
        gstomx_calculate_stride (self->in_width[sink_number], format);
  }

  {
    /* Output framerate correspond to the minimum input framerate */
    const GValue *sink_framerate = NULL;

    sink_framerate = gst_structure_get_value (structure, "framerate");
    if (sink_framerate != NULL && GST_VALUE_HOLDS_FRACTION (sink_framerate)) {
      if (self->out_framerate == NULL ||
          gst_value_compare (sink_framerate,
              self->out_framerate) == GST_VALUE_LESS_THAN) {
        /* NOTE(review): this stores a pointer into the caps structure —
         * presumably the caps outlive the element's use of it; verify. */
        self->out_framerate = sink_framerate;
        self->duration = gst_util_uint64_scale_int (GST_SECOND,
            gst_value_get_fraction_denominator (sink_framerate),
            gst_value_get_fraction_numerator (sink_framerate));
      }
    }
  }

  return gst_pad_set_caps (pad, caps);
}
/* GObject set_property for glimagesink. */
static void
gst_glimage_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstGLImageSink *sink;

  g_return_if_fail (GST_IS_GLIMAGE_SINK (object));

  sink = GST_GLIMAGE_SINK (object);

  switch (prop_id) {
    case ARG_DISPLAY:
      /* Replace any previously configured display name. */
      g_free (sink->display_name);
      sink->display_name = g_strdup (g_value_get_string (value));
      break;
    case PROP_CLIENT_RESHAPE_CALLBACK:
      sink->clientReshapeCallback = g_value_get_pointer (value);
      break;
    case PROP_CLIENT_DRAW_CALLBACK:
      sink->clientDrawCallback = g_value_get_pointer (value);
      break;
    case PROP_CLIENT_DATA:
      sink->client_data = g_value_get_pointer (value);
      break;
    case PROP_FORCE_ASPECT_RATIO:
      sink->keep_aspect_ratio = g_value_get_boolean (value);
      break;
    case PROP_PIXEL_ASPECT_RATIO:
      sink->par_n = gst_value_get_fraction_numerator (value);
      sink->par_d = gst_value_get_fraction_denominator (value);
      break;
    case PROP_OTHER_CONTEXT:
      /* Drop our reference on the old context before taking the new one. */
      if (sink->other_context)
        gst_object_unref (sink->other_context);
      sink->other_context = g_value_dup_object (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Resize the preview to match the negotiated caps, honoring both the
 * video's and the display's pixel aspect ratio, and optionally capping
 * the size at 80% of the monitor. */
static void
caps_set(GstCaps *caps, signal_user_data_t *ud)
{
    GstStructure *ss;

    ss = gst_caps_get_structure(caps, 0);
    if (ss)
    {
        gint fps_n, fps_d, width, height;
        guint num, den, par_n, par_d;
        gint disp_par_n, disp_par_d;
        const GValue *par;

        gst_structure_get_fraction(ss, "framerate", &fps_n, &fps_d);
        gst_structure_get_int(ss, "width", &width);
        gst_structure_get_int(ss, "height", &height);

        // The pixel-aspect-ratio field may be absent; fall back to square
        // pixels instead of dereferencing a NULL GValue.
        par = gst_structure_get_value(ss, "pixel-aspect-ratio");
        if (par != NULL)
        {
            par_n = gst_value_get_fraction_numerator(par);
            par_d = gst_value_get_fraction_denominator(par);
        }
        else
        {
            par_n = par_d = 1;
        }
        ghb_screen_par(ud, &disp_par_n, &disp_par_d);
        gst_video_calculate_display_ratio(
            &num, &den, width, height, par_n, par_d, disp_par_n, disp_par_d);

        // Scale one dimension so the preview shows the display aspect ratio.
        if (par_n > par_d)
            width = gst_util_uint64_scale_int(height, num, den);
        else
            height = gst_util_uint64_scale_int(width, den, num);
        preview_set_size(ud, width, height);

        if (ghb_dict_get_bool(ud->prefs, "reduce_hd_preview"))
        {
            GdkWindow *window;
            gint s_w, s_h;

            window = gtk_widget_get_window(
                GHB_WIDGET(ud->builder, "preview_window"));
            ghb_monitor_get_size(window, &s_w, &s_h);
            if (s_w > 0 && s_h > 0)
            {
                // Cap the preview at 80% of the monitor in either dimension,
                // preserving the display aspect ratio.
                if (width > s_w * 80 / 100)
                {
                    width = s_w * 80 / 100;
                    height = gst_util_uint64_scale_int(width, den, num);
                }
                if (height > s_h * 80 / 100)
                {
                    height = s_h * 80 / 100;
                    width = gst_util_uint64_scale_int(height, num, den);
                }
            }
        }
    }
}
/* Test helper: extract the min and max fractions of a fraction-range field
 * from a structure, failing the test if the field is absent or mistyped. */
static void
_get_fraction_range (GstStructure * s, const gchar * field,
    gint * fps_n_min, gint * fps_d_min, gint * fps_n_max, gint * fps_d_max)
{
  const GValue *range;
  const GValue *bound;

  range = gst_structure_get_value (s, field);
  fail_unless (range != NULL);
  fail_unless (GST_VALUE_HOLDS_FRACTION_RANGE (range));

  bound = gst_value_get_fraction_range_min (range);
  fail_unless (GST_VALUE_HOLDS_FRACTION (bound));
  *fps_n_min = gst_value_get_fraction_numerator (bound);
  *fps_d_min = gst_value_get_fraction_denominator (bound);

  bound = gst_value_get_fraction_range_max (range);
  fail_unless (GST_VALUE_HOLDS_FRACTION (bound));
  *fps_n_max = gst_value_get_fraction_numerator (bound);
  *fps_d_max = gst_value_get_fraction_denominator (bound);
}
/* GObject set_property for videoparse. Properties may only change before
 * the raw parser has negotiated its output format. */
static void
gst_video_parse_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoParse *vp = GST_VIDEO_PARSE (object);

  g_return_if_fail (!gst_raw_parse_is_negotiated (GST_RAW_PARSE (vp)));

  switch (prop_id) {
    case PROP_FORMAT:
      vp->format = g_value_get_enum (value);
      break;
    case PROP_WIDTH:
      vp->width = g_value_get_int (value);
      break;
    case PROP_HEIGHT:
      vp->height = g_value_get_int (value);
      break;
    case PROP_FRAMERATE:
      gst_raw_parse_set_fps (GST_RAW_PARSE (vp),
          gst_value_get_fraction_numerator (value),
          gst_value_get_fraction_denominator (value));
      break;
    case PROP_PAR:
      vp->par_n = gst_value_get_fraction_numerator (value);
      vp->par_d = gst_value_get_fraction_denominator (value);
      break;
    case PROP_INTERLACED:
      vp->interlaced = g_value_get_boolean (value);
      break;
    case PROP_TOP_FIELD_FIRST:
      vp->top_field_first = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  /* Any of the properties above can affect the expected frame size. */
  gst_video_parse_update_frame_size (vp);
}
/* Register a capture format for one GST_TYPE_FRACTION framerate value.
 *
 * Extracts numerator/denominator from @val and forwards them, together with
 * the frame dimensions, to lgm_device_add_format().
 */
static void
lgm_device_add_format_from_fps_val (GHashTable * table, int width, int height,
    const GValue * val)
{
  gint num = gst_value_get_fraction_numerator (val);
  gint den = gst_value_get_fraction_denominator (val);

  /* A zero numerator marks "no fixed rate": normalise the pair to 0/0. */
  if (num == 0) {
    den = 0;
  }
  lgm_device_add_format (table, width, height, num, den);
}
void GStreamerWrapper::retrieveVideoInfo() { ////////////////////////////////////////////////////////////////////////// Media Duration // Nanoseconds GstFormat gstFormat = GST_FORMAT_TIME; gst_element_query_duration( GST_ELEMENT( m_GstPipeline ), &gstFormat, &m_iDurationInNs ); // Milliseconds m_dDurationInMs = GST_TIME_AS_MSECONDS( m_iDurationInNs ); ////////////////////////////////////////////////////////////////////////// Stream Info // Number of Video Streams g_object_get( m_GstPipeline, "n-video", &m_iNumVideoStreams, NULL ); // Number of Audio Streams g_object_get( m_GstPipeline, "n-audio", &m_iNumAudioStreams, NULL ); // Set Content Type according to the number of available Video and Audio streams if ( m_iNumVideoStreams > 0 && m_iNumAudioStreams > 0 ) m_ContentType = VIDEO_AND_AUDIO; else if ( m_iNumVideoStreams > 0 ) m_ContentType = VIDEO; else if ( m_iNumAudioStreams > 0 ) m_ContentType = AUDIO; ////////////////////////////////////////////////////////////////////////// Video Data if ( m_iNumVideoStreams > 0 ) { GstPad* gstPad = gst_element_get_static_pad( m_GstVideoSink, "sink" ); if ( gstPad ) { // Video Size gst_video_get_size( GST_PAD( gstPad ), &m_iWidth, &m_iHeight ); // Frame Rate const GValue* framerate = gst_video_frame_rate( gstPad ); int iFpsNumerator = gst_value_get_fraction_numerator( framerate ); int iFpsDenominator = gst_value_get_fraction_denominator( framerate ); // Number of frames m_iNumberOfFrames = (float)( m_iDurationInNs / GST_SECOND ) * (float)iFpsNumerator / (float)iFpsDenominator; // FPS m_fFps = (float)iFpsNumerator / (float)iFpsDenominator; gst_object_unref( gstPad ); } } }
/* Report the display's pixel aspect ratio via the live-preview video sink.
 *
 * Queries the playbin's "video-sink" element for its "pixel-aspect-ratio"
 * property and transforms it into a GST_TYPE_FRACTION. Falls back to 1:1
 * when live preview is disabled, the sink is unavailable, or the property
 * cannot be read.
 *
 * Fixes over the original:
 *  - g_object_get() on "video-sink" returns a new reference to the element
 *    which was never released on any path -- that reference leak is plugged;
 *  - the property lookup used the inconsistent name "pixel-aspect_ratio"
 *    (underscore) while the subsequent read used "pixel-aspect-ratio";
 *    both now use the canonical hyphenated name.
 */
void
ghb_screen_par(signal_user_data_t *ud, gint *par_n, gint *par_d)
{
#if defined(_ENABLE_GST)
    GValue disp_par = {0,};
    GstElement *xover = NULL;
    GObjectClass *klass;
    GParamSpec *pspec;

    if (!ud->preview->live_enabled)
        goto fail;

    // Assume a default even if we fail to read the property below
    g_value_init(&disp_par, GST_TYPE_FRACTION);
    gst_value_set_fraction(&disp_par, 1, 1);
    g_object_get(ud->preview->play, "video-sink", &xover, NULL);
    if (xover == NULL)
        goto fail;

    klass = G_OBJECT_GET_CLASS(xover);
    if (klass == NULL)
        goto fail;

    pspec = g_object_class_find_property(klass, "pixel-aspect-ratio");
    if (pspec)
    {
        GValue par_prop = {0,};

        g_value_init(&par_prop, pspec->value_type);
        g_object_get_property(G_OBJECT(xover), "pixel-aspect-ratio",
                                &par_prop);
        if (!g_value_transform(&par_prop, &disp_par))
        {
            g_warning("transform failed");
            gst_value_set_fraction(&disp_par, 1, 1);
        }
        g_value_unset(&par_prop);
    }
    *par_n = gst_value_get_fraction_numerator(&disp_par);
    *par_d = gst_value_get_fraction_denominator(&disp_par);
    g_value_unset(&disp_par);
    gst_object_unref(xover);
    return;

fail:
    // Release the sink reference if we acquired one before failing
    if (xover != NULL)
        gst_object_unref(xover);
    if (G_IS_VALUE(&disp_par))
        g_value_unset(&disp_par);
    *par_n = 1;
    *par_d = 1;
#else
    *par_n = 1;
    *par_d = 1;
#endif
}
static PyObject * gi_gst_fraction_from_value (const GValue * value) { PyObject *fraction_type, *args, *fraction; gint numerator, denominator; numerator = gst_value_get_fraction_numerator (value); denominator = gst_value_get_fraction_denominator (value); fraction_type = gi_gst_get_type ("Fraction"); args = Py_BuildValue ("(ii)", numerator, denominator); fraction = PyObject_Call (fraction_type, args, NULL); Py_DECREF (args); return fraction; }
/* Source-pad setcaps handler for GstOmxBaseFilter21.
 *
 * Parses width/height/stride from the fixed output caps, stamps the caps
 * with the framerate computed earlier in sink_setcaps, and stores a copy of
 * the caps on the output port for the OMX transport buffer.
 *
 * Returns TRUE on success; FALSE when the caps lack dimensions or when no
 * output framerate has been computed yet.
 */
static gboolean src_setcaps (GstPad *pad, GstCaps *caps)
{
    GstOmxBaseFilter21 *self;
    GstVideoFormat format;
    GstStructure *structure;

    self = GST_OMX_BASE_FILTER21 (GST_PAD_PARENT (pad));
    structure = gst_caps_get_structure (caps, 0);

    GST_INFO_OBJECT (self, "setcaps (src): %" GST_PTR_FORMAT, caps);
    g_return_val_if_fail (caps, FALSE);
    g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

    /* Extract geometry (and stride, if present) from the caps. */
    if (!gst_video_format_parse_caps_strided (caps,
            &format, &self->out_width, &self->out_height, &self->out_stride))
    {
        GST_WARNING_OBJECT (self, "width and/or height is not set in caps");
        return FALSE;
    }

    /* Caps carried no stride: derive it from width and pixel format. */
    if (!self->out_stride)
    {
        self->out_stride = gstomx_calculate_stride (self->out_width, format);
    }

    /* Set output framerate already calculated in sink_setcaps */
    if( self->out_framerate == NULL )
    {
        GST_WARNING_OBJECT (self, "unable to calculate output framerate");
        return FALSE;
    }
    gint out_framerate_num =
        gst_value_get_fraction_numerator(self->out_framerate);
    gint out_framerate_denom =
        gst_value_get_fraction_denominator(self->out_framerate);
    /* Write the framerate into the (mutable) caps structure in place. */
    gst_structure_set(structure, "framerate", GST_TYPE_FRACTION,
        out_framerate_num, out_framerate_denom, NULL);
    GST_INFO_OBJECT(self, "output framerate is: %d/%d",
        out_framerate_num, out_framerate_denom);

    /* save the src caps later needed by omx transport buffer */
    if (self->out_port->caps)
        gst_caps_unref (self->out_port->caps);
    self->out_port->caps = gst_caps_copy (caps);

    return TRUE;
}