/**
 * gst_vaapi_profile_from_caps:
 * @caps: a #GstCaps
 *
 * Converts @caps into the corresponding #GstVaapiProfile. If the
 * profile cannot be represented by #GstVaapiProfile, then zero is
 * returned.
 *
 * Return value: the #GstVaapiProfile describing the @caps
 */
GstVaapiProfile
gst_vaapi_profile_from_caps (const GstCaps * caps)
{
  const GstVaapiProfileMap *m;
  GstCaps *caps_test;
  GstStructure *structure;
  const gchar *profile_str;
  GstVaapiProfile profile, best_profile;
  GstBuffer *codec_data = NULL;
  const gchar *name;
  gsize namelen;

  if (!caps)
    return 0;

  structure = gst_caps_get_structure (caps, 0);
  if (!structure)
    return 0;

  name = gst_structure_get_name (structure);
  namelen = strlen (name);

  /* Prefer an explicit "profile" string in the caps; only fall back to
   * parsing the codec_data blob when it is absent. */
  profile_str = gst_structure_get_string (structure, "profile");
  if (!profile_str) {
    const GValue *v_codec_data;
    v_codec_data = gst_structure_get_value (structure, "codec_data");
    if (v_codec_data)
      codec_data = gst_value_get_buffer (v_codec_data);
  }

  profile = 0;
  best_profile = 0;
  for (m = gst_vaapi_profiles; !profile && m->profile; m++) {
    /* NOTE(review): only the first strlen(name) chars are compared, so a
     * map entry whose media_str merely starts with @name also matches. */
    if (strncmp (name, m->media_str, namelen) != 0)
      continue;
    caps_test = gst_caps_from_string (m->media_str);
    if (gst_caps_is_always_compatible (caps, caps_test)) {
      /* remember the last media-compatible entry as a fallback result */
      best_profile = m->profile;
      if (profile_str && m->profile_str
          && strcmp (profile_str, m->profile_str) == 0)
        profile = best_profile;
    }
    if (!profile) {
      /* no string match: try deriving the profile from codec_data */
      profile =
          gst_vaapi_profile_from_codec_data (gst_vaapi_profile_get_codec
          (m->profile), codec_data);
      if (!profile && WORKAROUND_QTDEMUX_NO_H263_PROFILES
          && strncmp (name, "video/x-h263", namelen) == 0) {
        /* HACK: qtdemux does not report profiles for h263 */
        profile = m->profile;
      }
    }
    gst_caps_unref (caps_test);
  }
  return profile ? profile : best_profile;
}
/* Sink setcaps handler: reads the mandatory framerate and the optional
 * pixel-aspect-ratio from @caps into the decoder state.  Fails if no
 * framerate is present. */
static gboolean
gst_dvdec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstDVDec *dvdec = GST_DVDEC (gst_pad_get_parent (pad));
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  const GValue *framerate_val;
  const GValue *par_val;

  /* we allow framerate and PAR to be overwritten. framerate is mandatory. */
  framerate_val = gst_structure_get_value (structure, "framerate");
  if (framerate_val == NULL)
    goto no_framerate;

  par_val = gst_structure_get_value (structure, "pixel-aspect-ratio");
  if (par_val != NULL) {
    dvdec->par_x = gst_value_get_fraction_numerator (par_val);
    dvdec->par_y = gst_value_get_fraction_denominator (par_val);
    dvdec->need_par = FALSE;
  } else {
    /* no PAR in caps; it will have to be determined later */
    dvdec->par_x = 0;
    dvdec->par_y = 0;
    dvdec->need_par = TRUE;
  }

  dvdec->framerate_numerator = gst_value_get_fraction_numerator (framerate_val);
  dvdec->framerate_denominator =
      gst_value_get_fraction_denominator (framerate_val);
  dvdec->sink_negotiated = TRUE;
  dvdec->src_negotiated = FALSE;

  gst_object_unref (dvdec);
  return TRUE;

  /* ERRORS */
no_framerate:
  {
    GST_DEBUG_OBJECT (dvdec, "no framerate specified in caps");
    gst_object_unref (dvdec);
    return FALSE;
  }
}
/* Sink setcaps handler for the two-input OMX filter: parses the video
 * geometry of the pad's caps into the per-input state and tracks the
 * minimum input framerate as the output framerate. */
static gboolean
sink_setcaps (GstPad *pad, GstCaps *caps)
{
  GstStructure *structure;
  GstOmxBaseFilter21 *self;
  GstVideoFormat format;
  int sink_number;

  self = GST_OMX_BASE_FILTER21 (GST_PAD_PARENT (pad));

  /* Map the pad name onto an input index; reject anything unexpected
   * (sink_number was previously left uninitialized in that case). */
  if (strcmp (GST_PAD_NAME (pad), "sink_00") == 0) {
    sink_number = 0;
  }
  else if (strcmp (GST_PAD_NAME (pad), "sink_01") == 0) {
    sink_number = 1;
  }
  else {
    GST_WARNING_OBJECT (self, "setcaps on unexpected pad %s",
        GST_PAD_NAME (pad));
    return FALSE;
  }

  GST_INFO_OBJECT (self, "setcaps (sink): %d", sink_number);
  GST_INFO_OBJECT (self, "setcaps (sink): %" GST_PTR_FORMAT, caps);

  g_return_val_if_fail (caps, FALSE);
  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);

  structure = gst_caps_get_structure (caps, 0);
  g_return_val_if_fail (structure, FALSE);

  if (!gst_video_format_parse_caps_strided (caps, &format,
          &self->in_width[sink_number], &self->in_height[sink_number],
          &self->in_stride[sink_number])) {
    GST_WARNING_OBJECT (self, "width and/or height is not set in caps");
    return FALSE;
  }

  if (!self->in_stride[sink_number]) {
    self->in_stride[sink_number] =
        gstomx_calculate_stride (self->in_width[sink_number], format);
  }

  {
    /* Output framerate corresponds to the minimum input framerate */
    const GValue *sink_framerate = NULL;

    sink_framerate = gst_structure_get_value (structure, "framerate");
    /* guard against a missing framerate field before type-checking it */
    if (sink_framerate != NULL && GST_VALUE_HOLDS_FRACTION (sink_framerate)) {
      if (self->out_framerate == NULL ||
          gst_value_compare (sink_framerate, self->out_framerate) ==
          GST_VALUE_LESS_THAN) {
        /* NOTE(review): this stores a pointer into @caps' structure; it
         * is only valid while the caps stay alive — TODO confirm. */
        self->out_framerate = sink_framerate;
        self->duration = gst_util_uint64_scale_int (GST_SECOND,
            gst_value_get_fraction_denominator (sink_framerate),
            gst_value_get_fraction_numerator (sink_framerate));
      }
    }
  }

  return gst_pad_set_caps (pad, caps);
}
/* Update the preview window size from the negotiated caps, honoring the
 * stream pixel-aspect-ratio and the monitor's display aspect ratio, and
 * optionally shrinking HD previews to 80% of the monitor. */
static void
caps_set(GstCaps *caps, signal_user_data_t *ud)
{
    GstStructure *ss;

    ss = gst_caps_get_structure(caps, 0);
    if (ss)
    {
        gint fps_n, fps_d, width, height;
        guint num, den, par_n, par_d;
        gint disp_par_n, disp_par_d;
        const GValue *par;

        /* NOTE(review): assumes fixed caps with framerate/width/height
         * present — the values are used unchecked below */
        gst_structure_get_fraction(ss, "framerate", &fps_n, &fps_d);
        gst_structure_get_int(ss, "width", &width);
        gst_structure_get_int(ss, "height", &height);
        par = gst_structure_get_value(ss, "pixel-aspect-ratio");
        if (par != NULL)
        {
            par_n = gst_value_get_fraction_numerator(par);
            par_d = gst_value_get_fraction_denominator(par);
        }
        else
        {
            /* Caps without a PAR field: assume square pixels instead of
             * dereferencing a NULL GValue */
            par_n = par_d = 1;
        }
        ghb_screen_par(ud, &disp_par_n, &disp_par_d);
        gst_video_calculate_display_ratio(
            &num, &den, width, height, par_n, par_d, disp_par_n, disp_par_d);

        /* scale one dimension so the preview keeps the display ratio */
        if (par_n > par_d)
            width = gst_util_uint64_scale_int(height, num, den);
        else
            height = gst_util_uint64_scale_int(width, den, num);

        preview_set_size(ud, width, height);
        if (ghb_dict_get_bool(ud->prefs, "reduce_hd_preview"))
        {
            GdkWindow *window;
            gint s_w, s_h;

            window = gtk_widget_get_window(
                GHB_WIDGET(ud->builder, "preview_window"));
            ghb_monitor_get_size(window, &s_w, &s_h);
            if (s_w > 0 && s_h > 0)
            {
                /* cap the preview to 80% of the monitor in each dimension */
                if (width > s_w * 80 / 100)
                {
                    width = s_w * 80 / 100;
                    height = gst_util_uint64_scale_int(width, den, num);
                }
                if (height > s_h * 80 / 100)
                {
                    height = s_h * 80 / 100;
                    width = gst_util_uint64_scale_int(height, num, den);
                }
            }
        }
    }
}
/* GstAudioDecoder::set_format implementation: extracts the FLAC headers
 * from the caps' "streamheader" array, pushes them through the adapter
 * into the FLAC decoder and processes them up to end-of-metadata. */
static gboolean
gst_flac_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
  const GValue *headers;
  GstFlacDec *flacdec;
  GstStructure *s;
  guint i, num;

  flacdec = GST_FLAC_DEC (dec);

  GST_LOG_OBJECT (dec, "sink caps: %" GST_PTR_FORMAT, caps);

  s = gst_caps_get_structure (caps, 0);
  headers = gst_structure_get_value (s, "streamheader");
  if (headers == NULL || !GST_VALUE_HOLDS_ARRAY (headers)) {
    /* without headers the decoder cannot be initialized; a parser
     * upstream is expected to supply them */
    GST_WARNING_OBJECT (dec, "no 'streamheader' field in input caps, try "
        "adding a flacparse element upstream");
    return FALSE;
  }

  if (gst_adapter_available (flacdec->adapter) > 0) {
    GST_WARNING_OBJECT (dec, "unexpected data left in adapter");
    gst_adapter_clear (flacdec->adapter);
  }

  num = gst_value_array_get_size (headers);
  for (i = 0; i < num; ++i) {
    const GValue *header_val;
    GstBuffer *header_buf;

    header_val = gst_value_array_get_value (headers, i);
    if (header_val == NULL || !GST_VALUE_HOLDS_BUFFER (header_val))
      return FALSE;

    /* the adapter takes ownership, hence the dup */
    header_buf = g_value_dup_boxed (header_val);
    GST_INFO_OBJECT (dec, "pushing header buffer of %" G_GSIZE_FORMAT " bytes "
        "into adapter", gst_buffer_get_size (header_buf));
    gst_adapter_push (flacdec->adapter, header_buf);
  }

  GST_DEBUG_OBJECT (dec, "Processing headers and metadata");
  if (!FLAC__stream_decoder_process_until_end_of_metadata (flacdec->decoder)) {
    GST_WARNING_OBJECT (dec, "process_until_end_of_metadata failed");
    if (FLAC__stream_decoder_get_state (flacdec->decoder) ==
        FLAC__STREAM_DECODER_ABORTED) {
      GST_WARNING_OBJECT (flacdec, "Read callback caused internal abort");
      /* allow recovery */
      gst_adapter_clear (flacdec->adapter);
      FLAC__stream_decoder_flush (flacdec->decoder);
      gst_flac_dec_handle_decoder_error (flacdec, TRUE);
    }
  }

  GST_INFO_OBJECT (dec, "headers and metadata are now processed");
  return TRUE;
}
static gboolean gst_video_test_src_parse_caps (const GstCaps * caps, gint * width, gint * height, gint * fps_n, gint * fps_d, GstVideoColorimetry * colorimetry, gint * x_inv, gint * y_inv) { const GstStructure *structure; GstPadLinkReturn ret; const GValue *framerate; const gchar *str; GST_DEBUG ("parsing caps"); structure = gst_caps_get_structure (caps, 0); ret = gst_structure_get_int (structure, "width", width); ret &= gst_structure_get_int (structure, "height", height); framerate = gst_structure_get_value (structure, "framerate"); if (framerate) { *fps_n = gst_value_get_fraction_numerator (framerate); *fps_d = gst_value_get_fraction_denominator (framerate); } else goto no_framerate; if ((str = gst_structure_get_string (structure, "colorimetry"))) gst_video_colorimetry_from_string (colorimetry, str); if ((str = gst_structure_get_string (structure, "format"))) { if (g_str_equal (str, "bggr")) { *x_inv = *y_inv = 0; } else if (g_str_equal (str, "rggb")) { *x_inv = *y_inv = 1; } else if (g_str_equal (str, "grbg")) { *x_inv = 0; *y_inv = 1; } else if (g_str_equal (str, "grbg")) { *x_inv = 1; *y_inv = 0; } else goto invalid_format; } return ret; /* ERRORS */ no_framerate: { GST_DEBUG ("videotestsrc no framerate given"); return FALSE; } invalid_format: { GST_DEBUG ("videotestsrc invalid bayer format given"); return FALSE; } }
std::string VideoV4lSource::srcCaps(unsigned int framerateIndex) const { std::ostringstream capsStr; GstStateChangeReturn ret = gst_element_set_state(source_, GST_STATE_READY); if (ret not_eq GST_STATE_CHANGE_SUCCESS) THROW_ERROR("Could not change v4l2src state to READY"); GstPad *srcPad = gst_element_get_static_pad(source_, "src"); GstCaps *caps = gst_pad_get_caps(srcPad); GstStructure *structure = gst_caps_get_structure(caps, 0); const GValue *val = gst_structure_get_value(structure, "framerate"); LOG_DEBUG("Caps structure from v4l2src srcpad: " << gst_structure_to_string(structure)); gint framerate_numerator, framerate_denominator; if (GST_VALUE_HOLDS_LIST(val)) { // trying another one if (framerateIndex >= gst_value_list_get_size(val)) THROW_ERROR("Framerate index out of range"); framerate_numerator = gst_value_get_fraction_numerator((gst_value_list_get_value(val, framerateIndex))); framerate_denominator = gst_value_get_fraction_denominator((gst_value_list_get_value(val, framerateIndex))); } else { // FIXME: this is really bad, we should be iterating over framerates and resolutions until we find a good one if (framerateIndex > 0) LOG_ERROR("Caps parameters haven't been changed and have failed before"); framerate_numerator = gst_value_get_fraction_numerator(val); framerate_denominator = gst_value_get_fraction_denominator(val); } gst_caps_unref(caps); gst_object_unref(srcPad); // use default from gst std::string capsSuffix = boost::lexical_cast<std::string>(framerate_numerator); capsSuffix += "/"; capsSuffix += boost::lexical_cast<std::string>(framerate_denominator); if (v4l2util::isInterlaced(deviceStr())) capsSuffix +=", interlaced=true"; capsSuffix += ", pixel-aspect-ratio="; capsSuffix += config_.pixelAspectRatio(); capsStr << "video/x-raw-yuv, width=" << config_.captureWidth() << ", height=" << config_.captureHeight() << ", framerate=" << capsSuffix; LOG_DEBUG("V4l2src caps are " << capsStr.str()); ret = gst_element_set_state(source_, GST_STATE_NULL); if 
(ret not_eq GST_STATE_CHANGE_SUCCESS) THROW_ERROR("Could not change v4l2src state to NULL"); return capsStr.str(); }
/* Mirror the geometry/timing fields of the negotiated sink @caps onto the
 * (lazily created) VA-API surface caps and push them on the source pad. */
static gboolean
gst_vaapidecode_update_src_caps(GstVaapiDecode *decode, GstCaps *caps)
{
    GstCaps *out_caps;
    GstStructure *src, *dst;
    const GValue *v_width, *v_height, *v_framerate, *v_par, *v_interlaced;
    gboolean success;

    /* lazily create the surface caps template we mutate below */
    if (!decode->srcpad_caps) {
        decode->srcpad_caps = gst_caps_from_string(GST_VAAPI_SURFACE_CAPS_NAME);
        if (!decode->srcpad_caps)
            return FALSE;
    }

    /* pick up the relevant fields from the sink caps... */
    src = gst_caps_get_structure(caps, 0);
    v_width = gst_structure_get_value(src, "width");
    v_height = gst_structure_get_value(src, "height");
    v_framerate = gst_structure_get_value(src, "framerate");
    v_par = gst_structure_get_value(src, "pixel-aspect-ratio");
    v_interlaced = gst_structure_get_value(src, "interlaced");

    /* ...and copy them onto the source pad caps */
    dst = gst_caps_get_structure(decode->srcpad_caps, 0);
    if (v_width && v_height) {
        gst_structure_set_value(dst, "width", v_width);
        gst_structure_set_value(dst, "height", v_height);
    }
    if (v_framerate)
        gst_structure_set_value(dst, "framerate", v_framerate);
    if (v_par)
        gst_structure_set_value(dst, "pixel-aspect-ratio", v_par);
    if (v_interlaced)
        gst_structure_set_value(dst, "interlaced", v_interlaced);

    gst_structure_set(dst, "type", G_TYPE_STRING, "vaapi", NULL);
    gst_structure_set(dst, "opengl", G_TYPE_BOOLEAN, USE_GLX, NULL);

    /* negotiate with a copy so srcpad_caps stays our private template */
    out_caps = gst_caps_copy(decode->srcpad_caps);
    success = gst_pad_set_caps(decode->srcpad, out_caps);
    gst_caps_unref(out_caps);
    return success;
}
/* Add an empty container value of @type to @s under @name and return a
 * mutable pointer to the copy now owned by the structure. */
static GValue *
gst_spectrum_message_add_container (GstStructure * s, GType type,
    const gchar * name)
{
  GValue value = { 0, };

  g_value_init (&value, type);
  /* the structure stores its own copy of the value */
  gst_structure_set_value (s, name, &value);
  g_value_unset (&value);

  /* cast away const: callers fill in the structure-owned copy in place */
  return (GValue *) gst_structure_get_value (s, name);
}
// GstBaseTransform sink-event handler: consumes PlayReady protection events
// (scheduling an async key request) and out-of-band "dxdrm-session" events
// (handing the Discretix session pointer over to this element).
static gboolean webkitMediaPlayReadyDecryptSinkEventHandler(GstBaseTransform* trans, GstEvent* event)
{
    gboolean result = FALSE;
    WebKitMediaPlayReadyDecrypt* self = WEBKIT_MEDIA_PLAYREADY_DECRYPT(trans);

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_PROTECTION: {
        const gchar* systemId;
        const gchar* origin;

        GST_INFO_OBJECT(self, "received protection event");
        // The parsed pointers reference data owned by the event itself.
        gst_event_parse_protection(event, &systemId, &self->initDataBuffer, &origin);
        GST_DEBUG_OBJECT(self, "systemId: %s", systemId);
        // Only PlayReady events originating from smooth-streaming are handled.
        if (!g_str_equal(systemId, PLAYREADY_PROTECTION_SYSTEM_ID) || !g_str_has_prefix(origin, "smooth-streaming")) {
            gst_event_unref(event);
            result = FALSE;
            break;
        }

        // Keep the event ref around so that the parsed event data
        // remains valid until the drm-key-need message has been sent.
        self->protectionEvent = event;
        // Run requestKey from the main loop as soon as possible.
        g_timeout_add(0, requestKey, self);
        result = TRUE;
        break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM_OOB: {
        GST_INFO_OBJECT(self, "received OOB event");
        g_mutex_lock(&self->mutex);
        const GstStructure* structure = gst_event_get_structure(event);
        if (gst_structure_has_name(structure, "dxdrm-session")) {
            GST_INFO_OBJECT(self, "received dxdrm session");
            // The session arrives as a raw pointer boxed in the structure.
            const GValue* value = gst_structure_get_value(structure, "session");
            self->sessionMetaData = reinterpret_cast<WebCore::DiscretixSession*>(g_value_get_pointer(value));
            self->streamReceived = TRUE;
            // Wake up any thread blocked waiting for the session.
            g_cond_signal(&self->condition);
        }
        g_mutex_unlock(&self->mutex);
        gst_event_unref(event);
        result = TRUE;
        break;
    }
    default:
        result = GST_BASE_TRANSFORM_CLASS(parent_class)->sink_event(trans, event);
        break;
    }
    return result;
}
bool GTKVideo::on_shmdata_connect(const std::string &shmpath) { shmpath_ = shmpath; g_object_set(G_OBJECT(shmsrc_.get_raw()), "socket-path", shmpath_.c_str(), nullptr); shm_sub_ = std2::make_unique<GstShmdataSubscriber>( shmsrc_.get_raw(), [this]( const std::string &caps){ this->graft_tree(".shmdata.reader." + shmpath_, ShmdataUtils::make_tree(caps, ShmdataUtils::get_category(caps), 0)); GstCaps *gstcaps = gst_caps_from_string(caps.c_str()); On_scope_exit{if (gstcaps) gst_caps_unref(gstcaps);}; GstStructure *caps_struct = gst_caps_get_structure (gstcaps, 0); const GValue *width_val = gst_structure_get_value (caps_struct, "width"); const GValue *height_val = gst_structure_get_value (caps_struct, "height"); this->vid_width_ = g_value_get_int(width_val); this->vid_height_ = g_value_get_int(height_val); this->update_padding(this->video_window_); },
/* GstBaseTransform::transform_caps: on the sink side, overlay the fields
 * configured in filter->caps onto the incoming caps structure.  With
 * "join" set, only structures sharing the media type are merged; with
 * "replace" set, all original fields are dropped first. */
static GstCaps *
gst_caps_setter_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstCapsSetter *filter;
  GstCaps *ret, *filter_caps;
  GstStructure *structure, *merge;
  const gchar *name;
  gint i, j;

  filter = GST_CAPS_SETTER (trans);

  GST_DEBUG_OBJECT (trans, "receiving caps: %" GST_PTR_FORMAT, caps);

  ret = gst_caps_copy (caps);

  /* this function is always called with a simple caps */
  if (!GST_CAPS_IS_SIMPLE (ret) || direction != GST_PAD_SINK)
    return ret;

  structure = gst_caps_get_structure (ret, 0);
  name = gst_structure_get_name (structure);

  /* take a ref under the object lock: the caps property may be changed
   * concurrently */
  GST_OBJECT_LOCK (filter);
  filter_caps = gst_caps_ref (filter->caps);
  GST_OBJECT_UNLOCK (filter);

  for (i = 0; i < gst_caps_get_size (filter_caps); ++i) {
    merge = gst_caps_get_structure (filter_caps, i);
    if (gst_structure_has_name (merge, name) || !filter->join) {
      /* without "join" the incoming structure is even renamed to match */
      if (!filter->join)
        gst_structure_set_name (structure, gst_structure_get_name (merge));
      if (filter->replace)
        gst_structure_remove_all_fields (structure);
      /* copy every configured field over the incoming structure */
      for (j = 0; j < gst_structure_n_fields (merge); ++j) {
        const gchar *fname;

        fname = gst_structure_nth_field_name (merge, j);
        gst_structure_set_value (structure, fname,
            gst_structure_get_value (merge, fname));
      }
    }
  }

  GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);

  gst_caps_unref (filter_caps);

  return ret;
}
void gstreamer_determine_video_dimensions(const char *uri, int *video_width, int *video_height) { GMainLoop *loop = g_main_loop_new(NULL, FALSE); char *playbin_launch_str = malloc(strlen(uri) + 64); sprintf(playbin_launch_str, PLAYBIN_STR " uri=%s audio-sink=fakesink video-sink=fakesink", uri); GError *error2 = NULL; GstElement *playbin = gst_parse_launch(playbin_launch_str, &error2); if (error2) { printf("Error: Could not create gstreamer pipeline for identification.\n"); printf("Parse error: %s\n", error2->message); exit(1); } playbin_pipeline = playbin; bus_quit_on_playing = TRUE; GstBus *playbin_bus = gst_pipeline_get_bus(GST_PIPELINE(playbin)); guint type_find_bus_watch_id = gst_bus_add_watch(playbin_bus, bus_callback, loop); gst_object_unref(playbin_bus); gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_READY); gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PLAYING); g_main_loop_run(loop); gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_PAUSED); GstPad *pad = gst_pad_new("", GST_PAD_UNKNOWN); g_signal_emit_by_name(playbin, "get-video-pad", 0, &pad, NULL); GstCaps *caps = gst_pad_get_current_caps(pad); *video_width = g_value_get_int(gst_structure_get_value( gst_caps_get_structure(caps, 0), "width")); *video_height = g_value_get_int(gst_structure_get_value( gst_caps_get_structure(caps, 0), "height")); g_object_unref(pad); gst_element_set_state(GST_ELEMENT(playbin), GST_STATE_NULL); gst_object_unref(GST_OBJECT(playbin)); g_source_remove(type_find_bus_watch_id); g_main_loop_unref(loop); }
/* GstBaseTransform::transform_caps: swap the media type between
 * video/x-raw and video/x-bayer, carrying over width/height/framerate,
 * and intersect with @filter if given. */
static GstCaps *
gst_rgb2bayer_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
  GstStructure *structure;
  GstStructure *new_structure;
  GstCaps *newcaps;
  const gchar *fields[] = { "width", "height", "framerate" };
  guint i;

  GST_DEBUG_OBJECT (trans, "transforming caps (from) %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);

  if (direction == GST_PAD_SRC) {
    newcaps = gst_caps_new_empty_simple ("video/x-raw");
  } else {
    newcaps = gst_caps_new_empty_simple ("video/x-bayer");
  }
  new_structure = gst_caps_get_structure (newcaps, 0);

  for (i = 0; i < G_N_ELEMENTS (fields); i++) {
    const GValue *value = gst_structure_get_value (structure, fields[i]);

    /* only copy fields actually present: gst_structure_set_value() with
     * a NULL value is invalid and the incoming caps may lack any of
     * these fields */
    if (value != NULL)
      gst_structure_set_value (new_structure, fields[i], value);
  }

  GST_DEBUG_OBJECT (trans, "transforming caps (into) %" GST_PTR_FORMAT,
      newcaps);

  if (filter) {
    GstCaps *tmpcaps = newcaps;
    newcaps = gst_caps_intersect (newcaps, filter);
    gst_caps_unref (tmpcaps);
  }

  return newcaps;
}
/* GstBaseTransform::fixate_caps: fixate the framerate of @othercaps as
 * close as possible to the (already fixed) framerate found in @caps. */
static GstCaps *
fs_videoanyrate_fixate_caps (GstBaseTransform * base,
    GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
{
  GstStructure *ins, *outs;
  const GValue *from_fr, *to_fr;

  g_return_val_if_fail (gst_caps_is_fixed (caps), othercaps);

  othercaps = gst_caps_make_writable (othercaps);

  GST_DEBUG_OBJECT (base, "trying to fixate othercaps %" GST_PTR_FORMAT
      " based on caps %" GST_PTR_FORMAT, othercaps, caps);

  ins = gst_caps_get_structure (caps, 0);
  outs = gst_caps_get_structure (othercaps, 0);

  from_fr = gst_structure_get_value (ins, "framerate");
  to_fr = gst_structure_get_value (outs, "framerate");

  /* both framerates are present but the target one may not be fixed yet */
  if (from_fr && to_fr && !gst_value_is_fixed (to_fr)) {
    gint from_fr_n, from_fr_d;

    /* from_fr should be fixed */
    g_return_val_if_fail (gst_value_is_fixed (from_fr), othercaps);

    from_fr_n = gst_value_get_fraction_numerator (from_fr);
    from_fr_d = gst_value_get_fraction_denominator (from_fr);

    GST_DEBUG_OBJECT (base, "fixating to_fr nearest to %d/%d",
        from_fr_n, from_fr_d);

    gst_structure_fixate_field_nearest_fraction (outs, "framerate",
        from_fr_n, from_fr_d);
  }

  /* fixate any remaining non-fixed fields with defaults */
  return gst_caps_fixate (othercaps);
}
// Handle a key-response event: if it carries a "drm-cipher" structure,
// store a private copy of the key buffer and report the event as consumed.
static gboolean
webKitMediaClearKeyDecryptorHandleKeyResponse(WebKitMediaCommonEncryptionDecrypt* self, GstEvent* event)
{
    WebKitMediaClearKeyDecryptPrivate* priv = WEBKIT_MEDIA_CK_DECRYPT_GET_PRIVATE(WEBKIT_MEDIA_CK_DECRYPT(self));
    const GstStructure* structure = gst_event_get_structure(event);

    // Ignore events that do not carry our key payload.
    if (!gst_structure_has_name(structure, "drm-cipher"))
        return FALSE;

    // Replace any previously stored key with our own copy of the new one.
    const GValue* keyValue = gst_structure_get_value(structure, "key");
    priv->key.clear();
    priv->key = adoptGRef(gst_buffer_copy(gst_value_get_buffer(keyValue)));
    return TRUE;
}
/* receive spectral data from element message: for every "spectrum"
 * element message, print frequency/magnitude pairs for all bands */
static gboolean
message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  const GstStructure *s;
  const GValue *magnitudes, *phases;
  GstClockTime endtime;
  guint band;

  if (message->type != GST_MESSAGE_ELEMENT)
    return TRUE;

  s = gst_message_get_structure (message);
  if (strcmp (gst_structure_get_name (s), "spectrum") != 0)
    return TRUE;

  if (!gst_structure_get_clock_time (s, "endtime", &endtime))
    endtime = GST_CLOCK_TIME_NONE;

  magnitudes = gst_structure_get_value (s, "magnitude");
  phases = gst_structure_get_value (s, "phase");

  for (band = 0; band < spect_bands; ++band) {
    /* center frequency of this band */
    gdouble freq =
        (gdouble) ((AUDIOFREQ / 2) * band + AUDIOFREQ / 4) / spect_bands;
    const GValue *mag = gst_value_list_get_value (magnitudes, band);
    const GValue *phase = gst_value_list_get_value (phases, band);

    if (mag != NULL && phase != NULL)
      g_print ("%g %f\n", freq, g_value_get_float (mag));
  }

  return TRUE;
}
/* Test helper: fetch @field from @s, assert it holds a GstIntRange, and
 * write its bounds to @min_v/@max_v. */
static void
_get_int_range (GstStructure * s, const gchar * field, gint * min_v,
    gint * max_v)
{
  const GValue *range_val = gst_structure_get_value (s, field);

  /* the field must exist and must be an int range */
  fail_unless (range_val != NULL);
  fail_unless (GST_VALUE_HOLDS_INT_RANGE (range_val));

  *min_v = gst_value_get_int_range_min (range_val);
  *max_v = gst_value_get_int_range_max (range_val);
}
/* Look up @fieldname in @structure and transform its value into @dest,
 * initialised to type @t.  Returns FALSE when the field is missing or the
 * value cannot be transformed to @t (previously a failed transform was
 * silently ignored and @dest left initialised but unset). */
static gboolean
snra_json_structure_get_as (const GstStructure * structure,
    const gchar * fieldname, GType t, GValue * dest)
{
  const GValue *v1 = gst_structure_get_value (structure, fieldname);
  if (v1 == NULL)
    return FALSE;

  g_value_init (dest, t);
  if (!g_value_transform (v1, dest)) {
    /* clean up so the caller does not see a half-initialised value */
    g_value_unset (dest);
    return FALSE;
  }

  return TRUE;
}
/* Pad probe run on every buffer: for each HEADER-flagged buffer, check
 * that a byte-identical copy is present in the "streamheader" array of
 * the pad's current caps.  Counts header buffers in n_in_caps. */
static GstPadProbeReturn
buffer_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstMapInfo map;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
    GstCaps *caps;
    GstStructure *s;
    const GValue *sh;
    GArray *buffers;
    GstBuffer *buf;
    int i;
    gboolean found = FALSE;

    n_in_caps++;

    caps = gst_pad_get_current_caps (pad);
    s = gst_caps_get_structure (caps, 0);
    fail_unless (gst_structure_has_field (s, "streamheader"));
    sh = gst_structure_get_value (s, "streamheader");
    buffers = g_value_peek_pointer (sh);
    assert_equals_int (buffers->len, 3);

    /* compare the probed buffer against each of the three header buffers */
    for (i = 0; i < 3; ++i) {
      GValue *val;
      GstMapInfo map2;

      val = &g_array_index (buffers, GValue, i);
      buf = g_value_peek_pointer (val);
      fail_unless (GST_IS_BUFFER (buf));

      gst_buffer_map (buf, &map2, GST_MAP_READ);
      if (map2.size == map.size) {
        if (memcmp (map2.data, map.data, map.size) == 0) {
          found = TRUE;
        }
      }
      gst_buffer_unmap (buf, &map2);
    }
    fail_unless (found, "Did not find incoming HEADER buffer %p on caps",
        buffer);

    gst_caps_unref (caps);
  }

  gst_buffer_unmap (buffer, &map);

  /* fixed: return the proper GstPadProbeReturn value, not TRUE */
  return GST_PAD_PROBE_OK;
}
/* Sink setcaps handler: reset the video state, parse geometry, framerate,
 * PAR, interlacing and codec_data from @caps, then invoke the subclass
 * start vfunc if present. */
static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStructure *structure;
  const GValue *codec_data;
  GstVideoState *state;
  gboolean ret = TRUE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);

  state = &base_video_decoder->state;

  /* drop the reference we took on the previous codec_data (see below) */
  if (state->codec_data) {
    gst_buffer_unref (state->codec_data);
  }
  memset (state, 0, sizeof (GstVideoState));

  structure = gst_caps_get_structure (caps, 0);

  gst_video_format_parse_caps (caps, NULL, &state->width, &state->height);
  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);

#if 0
  /* requires 0.10.23 */
  state->have_interlaced =
      gst_video_format_parse_caps_interlaced (caps, &state->interlaced);
#else
  state->have_interlaced = gst_structure_get_boolean (structure,
      "interlaced", &state->interlaced);
#endif

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
    /* fixed: gst_value_get_buffer() does not return a reference, but the
     * buffer is unreffed on the next setcaps above — take our own ref to
     * avoid an unbalanced unref of a caps-owned buffer */
    state->codec_data = gst_buffer_ref (gst_value_get_buffer (codec_data));
  }

  if (base_video_decoder_class->start) {
    ret = base_video_decoder_class->start (base_video_decoder);
  }

  g_object_unref (base_video_decoder);

  return ret;
}
/* Parse one caps structure into device formats: fixate width/height and
 * register an entry in @table for each framerate found (fractions, arrays
 * and lists are supported; ranges or a missing field map to 0/0). */
static void
lgm_device_parse_structure (GstStructure * s, GHashTable * table)
{
  gint width, height;
  const GValue *val;
  gchar *struct_str;

  struct_str = gst_structure_to_string (s);
  GST_DEBUG ("Parsing structure: %s\n", struct_str);
  g_free (struct_str);

  width = lgm_device_fixate_int_value (gst_structure_get_value (s, "width"));
  height = lgm_device_fixate_int_value (gst_structure_get_value (s, "height"));

  val = gst_structure_get_value (s, "framerate");
  if (val == NULL) {
    /* fixed: no framerate field at all previously crashed in
     * G_VALUE_TYPE(NULL); treat it like a range and register 0/0 */
    lgm_device_add_format (table, width, height, 0, 0);
  } else if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION) {
    lgm_device_add_format_from_fps_val (table, width, height, val);
  } else if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION_RANGE) {
    /* For sources returning template caps or ranges set framerate to 0/0 */
    lgm_device_add_format (table, width, height, 0, 0);
  } else if (G_VALUE_TYPE (val) == GST_TYPE_ARRAY) {
    guint n, len;

    len = gst_value_array_get_size (val);
    for (n = 0; n < len; n++) {
      const GValue *kid = gst_value_array_get_value (val, n);
      lgm_device_add_format_from_fps_val (table, width, height, kid);
    }
  } else if (G_VALUE_TYPE (val) == GST_TYPE_LIST) {
    guint n, len;

    len = gst_value_list_get_size (val);
    for (n = 0; n < len; n++) {
      const GValue *kid = gst_value_list_get_value (val, n);
      lgm_device_add_format_from_fps_val (table, width, height, kid);
    }
  }
}
/* receive spectral data from element message */
/* Bus watch: on "spectrum" element messages, print magnitude/phase for
 * bands whose magnitude exceeds -50 dB and whose frequency exceeds 16 Hz.
 * NOTE(review): the band count (20) and sample rate (8000) are hard-coded
 * here (see the TODOs) — keep them in sync with the spectrum element. */
gboolean AudioGrabber::message_handler (GstBus * bus, GstMessage * message, gpointer data)
{
  if (message->type == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (message);
    const gchar *name = gst_structure_get_name (s);
    GstClockTime endtime;

    if (strcmp (name, "spectrum") == 0) {
      const GValue *magnitudes;
      const GValue *phases;
      const GValue *mag, *phase;
      gdouble freq;
      int i;

      if (!gst_structure_get_clock_time (s, "endtime", &endtime))
        endtime = GST_CLOCK_TIME_NONE;
      //g_print ("New spectrum message, endtime %" GST_TIME_FORMAT "\n",
      //    GST_TIME_ARGS (endtime));

      magnitudes = gst_structure_get_value (s, "magnitude");
      phases = gst_structure_get_value (s, "phase");

      for (i = 0; i < 20; ++i) { // TODO 20 = _num_bands
        /* center frequency of band i */
        freq = (gdouble) ((8000 / 2) * i + 8000 / 4) / 20; // TODO 8000=_freq 8000=_freq 20=_num_bands
        mag = gst_value_list_get_value (magnitudes, i);
        phase = gst_value_list_get_value (phases, i);
        if (mag != NULL && phase != NULL && g_value_get_float (mag) > -50
            && freq > 16) {
          g_print ("band %d (freq %g): magnitude %f dB phase %f\n", i, freq,
              g_value_get_float (mag), g_value_get_float (phase));
          g_print ("\n");
        }
      }
    }
  }
  return TRUE;
}
/* Feed the Vorbis stream headers found in the current sink caps'
 * "streamheader" array through the header handling path.  Returns
 * GST_FLOW_NOT_NEGOTIATED when the headers are missing or broken,
 * GST_FLOW_OK otherwise. */
static GstFlowReturn
vorbis_dec_handle_header_caps (GstVorbisDec * vd)
{
  GstFlowReturn result = GST_FLOW_OK;
  GstCaps *caps;
  GstStructure *s = NULL;
  const GValue *array = NULL;

  caps = gst_pad_get_current_caps (GST_AUDIO_DECODER_SINK_PAD (vd));
  if (caps)
    s = gst_caps_get_structure (caps, 0);
  if (s)
    array = gst_structure_get_value (s, "streamheader");
  /* NOTE(review): @array points into the caps' structure but is used
   * after the unref below — presumably the pad still holds the caps
   * alive; confirm. */
  if (caps)
    gst_caps_unref (caps);

  if (array && (gst_value_array_get_size (array) >= MIN_NUM_HEADERS)) {
    const GValue *value = NULL;
    GstBuffer *buf = NULL;
    gint i = 0;

    /* push each header buffer in order, stopping on the first failure */
    while (result == GST_FLOW_OK && i < gst_value_array_get_size (array)) {
      value = gst_value_array_get_value (array, i);
      buf = gst_value_get_buffer (value);
      if (!buf)
        goto null_buffer;
      result = vorbis_dec_handle_header_buffer (vd, buf);
      i++;
    }
  } else
    goto array_error;

done:
  /* any failure is reported to the caller as NOT_NEGOTIATED */
  return (result != GST_FLOW_OK ? GST_FLOW_NOT_NEGOTIATED : GST_FLOW_OK);

  /* ERRORS */
array_error:
  {
    GST_WARNING_OBJECT (vd, "streamheader array not found");
    result = GST_FLOW_ERROR;
    goto done;
  }
null_buffer:
  {
    GST_WARNING_OBJECT (vd, "streamheader with null buffer received");
    result = GST_FLOW_ERROR;
    goto done;
  }
}
/* Test helper: assert that h264 caps are well-formed — either
 * stream-format=avc with a codec_data blob, or byte-stream with no
 * codec_data at all. */
static void
check_caps (GstCaps * caps)
{
  GstStructure *s;
  const GValue *sf, *avcc;
  const gchar *stream_format;

  fail_unless (caps != NULL);

  GST_INFO ("caps %" GST_PTR_FORMAT, caps);
  s = gst_caps_get_structure (caps, 0);
  fail_unless (s != NULL);
  fail_if (!gst_structure_has_name (s, "video/x-h264"));
  sf = gst_structure_get_value (s, "stream-format");
  fail_unless (sf != NULL);
  fail_unless (G_VALUE_HOLDS_STRING (sf));
  stream_format = g_value_get_string (sf);
  fail_unless (stream_format != NULL);
  if (strcmp (stream_format, "avc") == 0) {
    const guint8 *data;
    GstBuffer *buf;

    avcc = gst_structure_get_value (s, "codec_data");
    fail_unless (avcc != NULL);
    fail_unless (GST_VALUE_HOLDS_BUFFER (avcc));
    buf = gst_value_get_buffer (avcc);
    fail_unless (buf != NULL);
    data = GST_BUFFER_DATA (buf);
    /* first avcC byte (configuration version) is expected to be 1 */
    fail_unless_equals_int (data[0], 1);
    /* should be either baseline, main profile or extended profile */
    fail_unless (data[1] == 0x42 || data[1] == 0x4D || data[1] == 0x58);
  } else if (strcmp (stream_format, "byte-stream") == 0) {
    /* byte-stream carries headers in-band, so codec_data must be absent */
    fail_if (gst_structure_get_value (s, "codec_data") != NULL);
  } else {
    fail_if (TRUE, "unexpected stream-format in caps: %s", stream_format);
  }
}
/* this function is a bit of a last resort */
/* GstBaseSrc::fixate: pick concrete width/height (preferring the device's
 * current capture window, else its reported minimum, else a guess when
 * the device is closed), a default framerate, and collapse a fourcc list
 * down to its first entry. */
static void
gst_v4lsrc_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstStructure *structure;
  int i;
  int targetwidth, targetheight;
  GstV4lSrc *v4lsrc = GST_V4LSRC (bsrc);
  struct video_capability *vcap = &GST_V4LELEMENT (v4lsrc)->vcap;
  struct video_window *vwin = &GST_V4LELEMENT (v4lsrc)->vwin;

  if (GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
    GST_DEBUG_OBJECT (v4lsrc, "device reported w: %d-%d, h: %d-%d",
        vcap->minwidth, vcap->maxwidth, vcap->minheight, vcap->maxheight);
    targetwidth = vcap->minwidth;
    targetheight = vcap->minheight;
    /* if we can get the current vwin settings, we use those to fixate */
    if (!gst_v4l_get_capabilities (GST_V4LELEMENT (v4lsrc)))
      GST_DEBUG_OBJECT (v4lsrc, "failed getting capabilities");
    else {
      targetwidth = vwin->width;
      targetheight = vwin->height;
    }
  } else {
    GST_DEBUG_OBJECT (v4lsrc, "device closed, guessing");
    targetwidth = 320;
    targetheight = 200;
  }

  GST_DEBUG_OBJECT (v4lsrc, "targetting %dx%d", targetwidth, targetheight);

  for (i = 0; i < gst_caps_get_size (caps); ++i) {
    const GValue *v;

    structure = gst_caps_get_structure (caps, i);
    gst_structure_fixate_field_nearest_int (structure, "width", targetwidth);
    gst_structure_fixate_field_nearest_int (structure, "height", targetheight);
    /* default framerate: 15/2 = 7.5 fps */
    gst_structure_fixate_field_nearest_fraction (structure, "framerate", 15,
        2);

    v = gst_structure_get_value (structure, "format");
    if (v && G_VALUE_TYPE (v) != GST_TYPE_FOURCC) {
      guint32 fourcc;

      /* not a single fourcc yet: must be a list, pick the first entry */
      g_return_if_fail (G_VALUE_TYPE (v) == GST_TYPE_LIST);

      fourcc = gst_value_get_fourcc (gst_value_list_get_value (v, 0));
      gst_structure_set (structure, "format", GST_TYPE_FOURCC, fourcc, NULL);
    }
  }
}
/* Test helper: verify the element's pad template advertises every
 * GstVideoFormat — collect the "format" list from the template caps and
 * abort for any format enum value that is missing. */
static void
check_pad_template (GstPadTemplate * tmpl)
{
  const GValue *list_val, *fmt_val;
  GstStructure *s;
  gboolean *formats_supported;
  GstCaps *caps;
  guint i, num_formats;

  num_formats = get_num_formats ();
  formats_supported = g_new0 (gboolean, num_formats);

  caps = gst_pad_template_get_caps (tmpl);

  /* If this fails, we need to update this unit test */
  fail_unless_equals_int (gst_caps_get_size (caps), 1);
  s = gst_caps_get_structure (caps, 0);
  fail_unless (gst_structure_has_name (s, "video/x-raw"));
  list_val = gst_structure_get_value (s, "format");
  fail_unless (list_val != NULL);
  /* If this fails, we need to update this unit test */
  fail_unless (GST_VALUE_HOLDS_LIST (list_val));

  /* mark every format string found in the template */
  for (i = 0; i < gst_value_list_get_size (list_val); ++i) {
    GstVideoFormat fmt;
    const gchar *fmt_str;

    fmt_val = gst_value_list_get_value (list_val, i);
    fail_unless (G_VALUE_HOLDS_STRING (fmt_val));
    fmt_str = g_value_get_string (fmt_val);
    GST_LOG ("format string: '%s'", fmt_str);
    fmt = gst_video_format_from_string (fmt_str);
    fail_unless (fmt != GST_VIDEO_FORMAT_UNKNOWN);
    formats_supported[(guint) fmt] = TRUE;
  }

  gst_caps_unref (caps);

  /* starts at 2 — presumably skipping GST_VIDEO_FORMAT_UNKNOWN (0) and
   * GST_VIDEO_FORMAT_ENCODED (1); confirm against the enum definition */
  for (i = 2; i < num_formats; ++i) {
    if (!formats_supported[i]) {
      g_error ("videoconvert doesn't support format '%s'",
          gst_video_format_to_string ((GstVideoFormat) i));
    }
  }

  g_free (formats_supported);
}
/*
 * Takes caps and copies its audio fields (rate, channels, channel-mask)
 * to every structure/feature combination of tmpl_caps, merging the
 * results into a new caps set.
 */
static GstCaps *
__gst_audio_element_proxy_caps (GstElement * element, GstCaps * templ_caps,
    GstCaps * caps)
{
  GstCaps *result = gst_caps_new_empty ();
  gint i, j;
  gint templ_caps_size = gst_caps_get_size (templ_caps);
  gint caps_size = gst_caps_get_size (caps);

  for (i = 0; i < templ_caps_size; i++) {
    GQuark q_name =
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));
    GstCapsFeatures *features = gst_caps_get_features (templ_caps, i);

    for (j = 0; j < caps_size; j++) {
      const GstStructure *caps_s = gst_caps_get_structure (caps, j);
      const GValue *val;
      GstStructure *s;
      GstCaps *tmp = gst_caps_new_empty ();

      s = gst_structure_new_id_empty (q_name);
      if ((val = gst_structure_get_value (caps_s, "rate")))
        gst_structure_set_value (s, "rate", val);
      if ((val = gst_structure_get_value (caps_s, "channels")))
        gst_structure_set_value (s, "channels", val);
      /* fixed: raw audio caps name this field "channel-mask"; the old
       * "channels-mask" spelling never matched anything, so the mask was
       * silently dropped when proxying */
      if ((val = gst_structure_get_value (caps_s, "channel-mask")))
        gst_structure_set_value (s, "channel-mask", val);
      gst_caps_append_structure_full (tmp, s,
          gst_caps_features_copy (features));
      result = gst_caps_merge (result, tmp);
    }
  }

  return result;
}
/* Read the first entry of the @value_name list from @msg (a dB value)
 * and convert it to a linear amplitude fraction clamped to 1.0. */
static gdouble
gst_msg_db_to_percent (GstMessage * msg, gchar * value_name)
{
  const GValue *list;
  gdouble value_db;
  gdouble fraction;

  list = gst_structure_get_value (gst_message_get_structure (msg), value_name);
  value_db = g_value_get_double (gst_value_list_get_value (list, 0));

  /* dB -> linear amplitude: 10^(dB/20) */
  fraction = pow (10, value_db / 20);
  return fraction > 1.0 ? 1.0 : fraction;
}
/* Handler for PAT (Program Association Table) updates from the demuxer:
 * record each program's PMT PID, move stream usecounts from the old PMT
 * PID to the new one for selected programs, and rebuild the PID filter
 * if anything changed. */
static void
dvb_base_bin_pat_info_cb (DvbBaseBin * dvbbasebin,
    const GstStructure * pat_info)
{
  DvbBaseBinProgram *program;
  DvbBaseBinStream *stream;
  const GValue *value;
  GstStructure *program_info;
  guint program_number;
  guint pid;
  guint old_pmt_pid;
  gint i;
  gboolean rebuild_filter = FALSE;
  const GValue *programs;

  programs = gst_structure_get_value (pat_info, "programs");
  for (i = 0; i < gst_value_list_get_size (programs); ++i) {
    value = gst_value_list_get_value (programs, i);
    program_info = g_value_get_boxed (value);

    gst_structure_get_uint (program_info, "program-number", &program_number);
    gst_structure_get_uint (program_info, "pid", &pid);

    /* look up (or lazily create) our bookkeeping entry for this program */
    program = dvb_base_bin_get_program (dvbbasebin, program_number);
    if (program == NULL)
      program = dvb_base_bin_add_program (dvbbasebin, program_number);

    old_pmt_pid = program->pmt_pid;
    program->pmt_pid = pid;

    if (program->selected) {
      /* PAT update */
      /* G_MAXUINT16 appears to mark "no previous PMT PID"; otherwise
       * release the stream that carried the old PMT */
      if (old_pmt_pid != G_MAXUINT16 && old_pmt_pid != program->pmt_pid)
        dvb_base_bin_get_stream (dvbbasebin, old_pmt_pid)->usecount -= 1;

      stream = dvb_base_bin_get_stream (dvbbasebin, program->pmt_pid);
      if (stream == NULL)
        stream = dvb_base_bin_add_stream (dvbbasebin, program->pmt_pid);
      stream->usecount += 1;

      rebuild_filter = TRUE;
    }
  }

  if (rebuild_filter)
    dvb_base_bin_rebuild_filter (dvbbasebin);
}