/* GObject dispose handler for GstDeviceMonitor: tears down providers,
 * filters and the internal bus. Disposing a still-running monitor is a
 * programming error. */
static void
gst_device_monitor_dispose (GObject * object)
{
  GstDeviceMonitor *self = GST_DEVICE_MONITOR (object);

  g_return_if_fail (!self->priv->started);

  if (self->priv->providers != NULL) {
    /* remove providers from the tail; each removal shrinks the array */
    while (self->priv->providers->len > 0)
      gst_device_monitor_remove (self, self->priv->providers->len - 1);
    g_ptr_array_unref (self->priv->providers);
    self->priv->providers = NULL;
  }

  if (self->priv->filters != NULL) {
    g_ptr_array_unref (self->priv->filters);
    self->priv->filters = NULL;
  }

  /* drops our reference on the bus and NULLs the pointer atomically */
  gst_object_replace ((GstObject **) & self->priv->bus, NULL);

  G_OBJECT_CLASS (gst_device_monitor_parent_class)->dispose (object);
}
/* GstElement state-change handler for the GdkPixbuf decoder.
 * Resets frame-rate / video-info state when going READY->PAUSED and
 * releases the buffer pool when going PAUSED->READY. */
static GstStateChangeReturn
gst_gdk_pixbuf_dec_change_state (GstElement * element,
    GstStateChange transition)
{
  GstGdkPixbufDec *dec = GST_GDK_PIXBUF_DEC (element);
  GstStateChangeReturn result;

  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
    /* default to single image mode, setcaps function might not be called */
    dec->in_fps_n = 0;
    dec->in_fps_d = 1;
    gst_video_info_init (&dec->info);
  }

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (result == GST_STATE_CHANGE_FAILURE)
    return result;

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    dec->in_fps_n = 0;
    dec->in_fps_d = 0;
    if (dec->pool != NULL) {
      /* deactivate before dropping the last reference */
      gst_buffer_pool_set_active (dec->pool, FALSE);
      gst_object_replace ((GstObject **) & dec->pool, NULL);
    }
  }

  return result;
}
/* Creates a dmabuf allocator for the plugin's source pad.
 * Returns NULL when the plugin is neither a decoder nor a base
 * transform. When @check_for_map is TRUE the allocator is additionally
 * required to produce mappable buffers; otherwise it is released and
 * NULL is returned. */
static inline GstAllocator *
create_dmabuf_srcpad_allocator (GstVaapiPluginBase * plugin,
    GstVideoInfo * vinfo, gboolean check_for_map)
{
  GstAllocator *dmabuf_allocator;

  /* only decoders and transforms export dmabuf on their srcpad */
  if (!GST_IS_VIDEO_DECODER (plugin) && !GST_IS_BASE_TRANSFORM (plugin))
    return NULL;

  dmabuf_allocator = gst_vaapi_dmabuf_allocator_new (plugin->display, vinfo,
      get_dmabuf_surface_allocation_flags (), GST_PAD_SRC);
  if (dmabuf_allocator == NULL || !check_for_map)
    return dmabuf_allocator;

  /* the dmabuf allocator *must* be capable of mapping a buffer with raw
   * caps when there's no evidence of downstream dmabuf importation */
  if (!gst_vaapi_dmabuf_can_map (plugin->display, dmabuf_allocator)) {
    GST_INFO_OBJECT (plugin, "dmabuf allocator generates unmappable buffers");
    gst_object_replace ((GstObject **) & dmabuf_allocator, NULL);
  }

  return dmabuf_allocator;
}
/* Bus watch for the validate pipeline monitor.
 * Classifies ERROR messages into validate reports, tracks WARNINGs,
 * manages the periodic position-printing source across state changes
 * and buffering, and mirrors stream-collection / stream-selection
 * messages into the monitor. */
static void
_bus_handler (GstBus * bus, GstMessage * message,
    GstValidatePipelineMonitor * monitor)
{
  GError *err = NULL;
  gchar *debug = NULL;
  const GstStructure *details = NULL;
  gint error_flow = GST_FLOW_OK;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (message, &err, &debug);
      gst_message_parse_error_details (message, &details);

      /* pick the most specific report: missing plugin, then
       * not-negotiated (detected either via the flow-return detail or a
       * FORMAT stream error), then a generic error-on-bus */
      if (g_error_matches (err, GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)) {
        GST_VALIDATE_REPORT (monitor, MISSING_PLUGIN,
            "Error: %s -- Debug message: %s", err->message, debug);
      } else if ((g_error_matches (err, GST_STREAM_ERROR,
                  GST_STREAM_ERROR_FAILED) && details
              && gst_structure_get_int (details, "flow-return", &error_flow)
              && error_flow == GST_FLOW_NOT_NEGOTIATED)
          || g_error_matches (err, GST_STREAM_ERROR, GST_STREAM_ERROR_FORMAT)) {
        gchar *report = _generate_not_negotiated_error_report (message);

        GST_VALIDATE_REPORT (monitor, NOT_NEGOTIATED, "%s", report);
        g_free (report);
      } else {
        GST_VALIDATE_REPORT (monitor, ERROR_ON_BUS,
            "Got error: %s -- Debug message: %s", err->message, debug);
      }

      /* remember the error under the monitor lock */
      GST_VALIDATE_MONITOR_LOCK (monitor);
      monitor->got_error = TRUE;
      GST_VALIDATE_MONITOR_UNLOCK (monitor);
      g_error_free (err);
      g_free (debug);
      break;
    case GST_MESSAGE_WARNING:
      gst_message_parse_warning (message, &err, &debug);
      GST_VALIDATE_REPORT (monitor, WARNING_ON_BUS,
          "Got warning: %s -- Debug message: %s", err->message, debug);
      g_error_free (err);
      g_free (debug);
      break;
    case GST_MESSAGE_STATE_CHANGED:
    {
      /* only react to state changes of the monitored element itself */
      if (GST_MESSAGE_SRC (message) == GST_VALIDATE_MONITOR (monitor)->target) {
        GstState oldstate, newstate, pending;

        gst_message_parse_state_changed (message, &oldstate, &newstate,
            &pending);

        if (oldstate == GST_STATE_READY && newstate == GST_STATE_PAUSED) {
          /* start printing the position periodically once PAUSED */
          monitor->print_pos_srcid =
              g_timeout_add (PRINT_POSITION_TIMEOUT,
              (GSourceFunc) print_position, monitor);
        } else if (oldstate >= GST_STATE_PAUSED && newstate <= GST_STATE_READY) {
          /* going down: stop position printing and clear the error flag */
          if (monitor->print_pos_srcid
              && g_source_remove (monitor->print_pos_srcid))
            monitor->print_pos_srcid = 0;
          monitor->got_error = FALSE;
        }
      }

      break;
    }
    case GST_MESSAGE_BUFFERING:
    {
      JsonBuilder *jbuilder = json_builder_new ();
      GstBufferingMode mode;
      gint percent;

      gst_message_parse_buffering (message, &percent);
      gst_message_parse_buffering_stats (message, &mode, NULL, NULL, NULL);

      /* report buffering progress both on the console and as a JSON
       * message sent to the validate master */
      json_builder_begin_object (jbuilder);
      json_builder_set_member_name (jbuilder, "type");
      json_builder_add_string_value (jbuilder, "buffering");
      json_builder_set_member_name (jbuilder, "state");
      if (percent == 100) {
        /* a 100% message means buffering is done */
        gst_validate_printf (NULL, "\nDone buffering\n");
        json_builder_add_string_value (jbuilder, "done");
        if (monitor->buffering) {
          /* resume periodic position printing */
          monitor->print_pos_srcid =
              g_timeout_add (PRINT_POSITION_TIMEOUT,
              (GSourceFunc) print_position, monitor);
          monitor->buffering = FALSE;
        }
      } else {
        /* buffering... */
        if (!monitor->buffering) {
          monitor->buffering = TRUE;
          gst_validate_printf (NULL, "\nStart buffering\n");
          json_builder_add_string_value (jbuilder, "started");
          /* position printing is suspended while buffering */
          if (monitor->print_pos_srcid
              && g_source_remove (monitor->print_pos_srcid)) {
            monitor->print_pos_srcid = 0;
          }
        } else {
          json_builder_add_string_value (jbuilder, "progress");
        }
        gst_validate_printf (NULL, "%s %d%%  \r", "Buffering...", percent);
      }
      json_builder_set_member_name (jbuilder, "position");
      json_builder_add_int_value (jbuilder, percent);
      json_builder_end_object (jbuilder);

      gst_validate_send (json_builder_get_root (jbuilder));
      g_object_unref (jbuilder);
      break;
    }
    case GST_MESSAGE_STREAM_COLLECTION:
    {
      GstStreamCollection *collection = NULL;

      gst_message_parse_stream_collection (message, &collection);
      /* keep a reference to the latest collection on the monitor */
      gst_object_replace ((GstObject **) & monitor->stream_collection,
          (GstObject *) collection);
      gst_object_unref (collection);
      break;
    }
    case GST_MESSAGE_STREAMS_SELECTED:
    {
      guint i;

      /* replace the previously selected streams wholesale */
      if (monitor->streams_selected) {
        g_list_free_full (monitor->streams_selected, gst_object_unref);
        monitor->streams_selected = NULL;
      }

      for (i = 0; i < gst_message_streams_selected_get_size (message); i++) {
        GstStream *stream =
            gst_message_streams_selected_get_stream (message, i);

        monitor->streams_selected =
            g_list_append (monitor->streams_selected, stream);
      }
      break;
    }
    default:
      break;
  }
}
/* Initializes the Media SDK VPP session from the negotiated sink/src
 * pad video infos: chooses the IO pattern, configures frame-rate
 * conversion and deinterlacing, queries the surface requirements,
 * pre-allocates video-memory surface pools, and calls MFXVideoVPP_Init.
 * Runs with the object lock held; on failure the context reference is
 * dropped. Returns TRUE on success. */
static gboolean
gst_msdkvpp_initialize (GstMsdkVPP * thiz)
{
  mfxSession session;
  mfxStatus status;
  mfxFrameAllocRequest request[2];

  if (!thiz->context) {
    GST_WARNING_OBJECT (thiz, "No MSDK Context");
    return FALSE;
  }

  GST_OBJECT_LOCK (thiz);
  session = gst_msdk_context_get_session (thiz->context);

  /* video memory requires our frame allocator to be installed on the
   * context; otherwise plain system memory is used on both sides */
  if (thiz->use_video_memory) {
    gst_msdk_set_frame_allocator (thiz->context);
    thiz->param.IOPattern =
        MFX_IOPATTERN_IN_VIDEO_MEMORY | MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  } else {
    thiz->param.IOPattern =
        MFX_IOPATTERN_IN_SYSTEM_MEMORY | MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  }

  /* update input video attributes */
  gst_msdk_set_mfx_frame_info_from_video_info (&thiz->param.vpp.In,
      &thiz->sinkpad_info);

  /* update output video attributes, only CSC and Scaling are supported
   * for now */
  gst_msdk_set_mfx_frame_info_from_video_info (&thiz->param.vpp.Out,
      &thiz->srcpad_info);

  /* use msdk frame rate control if there is a mismatch in In & Out fps */
  if (GST_VIDEO_INFO_FPS_N (&thiz->srcpad_info) &&
      (GST_VIDEO_INFO_FPS_N (&thiz->sinkpad_info) !=
          GST_VIDEO_INFO_FPS_N (&thiz->srcpad_info)
          || GST_VIDEO_INFO_FPS_D (&thiz->sinkpad_info) !=
          GST_VIDEO_INFO_FPS_D (&thiz->srcpad_info))) {
    thiz->flags |= GST_MSDK_FLAG_FRC;

    /* So far this is the only algorithm which is working somewhat good */
    thiz->frc_algm = MFX_FRCALGM_PRESERVE_TIMESTAMP;
  }

  /* work-around to avoid zero fps in msdk structure */
  if (!thiz->param.vpp.In.FrameRateExtN)
    thiz->param.vpp.In.FrameRateExtN = 30;
  if (!thiz->param.vpp.Out.FrameRateExtN)
    thiz->param.vpp.Out.FrameRateExtN = thiz->param.vpp.In.FrameRateExtN;

  /* set vpp out picstruct as progressive if deinterlacing enabled */
  if (thiz->flags & GST_MSDK_FLAG_DEINTERLACE)
    thiz->param.vpp.Out.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

  /* Enable the required filters */
  ensure_filters (thiz);

  /* Add extended buffers */
  if (thiz->num_extra_params) {
    thiz->param.NumExtParam = thiz->num_extra_params;
    thiz->param.ExtParam = thiz->extra_params;
  }

  /* validate parameters and allow the Media SDK to make adjustments */
  status = MFXVideoVPP_Query (session, &thiz->param, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Video VPP Query failed (%s)",
        msdk_status_to_string (status));
    goto no_vpp;
  } else if (status > MFX_ERR_NONE) {
    /* positive status codes are warnings: parameters were adjusted */
    GST_WARNING_OBJECT (thiz, "Video VPP Query returned: %s",
        msdk_status_to_string (status));
  }

  status = MFXVideoVPP_QueryIOSurf (session, &thiz->param, request);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "VPP Query IO surfaces failed (%s)",
        msdk_status_to_string (status));
    goto no_vpp;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "VPP Query IO surfaces returned: %s",
        msdk_status_to_string (status));
  }

  if (thiz->use_video_memory) {
    /* Input surface pool pre-allocation */
    request[0].Type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
    if (thiz->use_sinkpad_dmabuf)
      request[0].Type |= MFX_MEMTYPE_EXPORT_FRAME;
    gst_msdk_frame_alloc (thiz->context, &(request[0]), &thiz->in_alloc_resp);

    /* Output surface pool pre-allocation */
    request[1].Type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
    if (thiz->use_srcpad_dmabuf)
      request[1].Type |= MFX_MEMTYPE_EXPORT_FRAME;
    gst_msdk_frame_alloc (thiz->context, &(request[1]), &thiz->out_alloc_resp);
  }

  thiz->in_num_surfaces = request[0].NumFrameSuggested;
  thiz->out_num_surfaces = request[1].NumFrameSuggested;

  status = MFXVideoVPP_Init (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Init failed (%s)", msdk_status_to_string (status));
    goto no_vpp;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Init returned: %s",
        msdk_status_to_string (status));
  }

  thiz->initialized = TRUE;
  GST_OBJECT_UNLOCK (thiz);
  return TRUE;

no_vpp:
  GST_OBJECT_UNLOCK (thiz);
  /* drop the context reference on failure */
  if (thiz->context)
    gst_object_replace ((GstObject **) & thiz->context, NULL);
  return FALSE;
}
/**
 * gst_vaapi_plugin_base_decide_allocation:
 * @plugin: a #GstVaapiPluginBase
 * @query: the allocation query to parse
 * @feature: the desired #GstVaapiCapsFeature, or zero to find the
 *   preferred one
 *
 * Decides allocation parameters for the downstream elements: ensures a
 * buffer pool with GstVaapiVideoMeta support (creating one when
 * downstream's proposal lacks it), optionally enables GstVideoMeta,
 * GstVideoAlignment and GstVideoGLTextureUploadMeta options, and stores
 * the resulting pool on @plugin->srcpad_buffer_pool.
 *
 * Returns: %TRUE if successful, %FALSE otherwise.
 */
gboolean
gst_vaapi_plugin_base_decide_allocation (GstVaapiPluginBase * plugin,
    GstQuery * query, guint feature)
{
  GstCaps *caps = NULL;
  GstBufferPool *pool;
  GstStructure *config;
  GstVideoInfo vi;
  guint size, min, max;
  gboolean update_pool = FALSE;
  gboolean has_video_meta = FALSE;
  gboolean has_video_alignment = FALSE;
#if (USE_GLX || USE_EGL)
  gboolean has_texture_upload_meta = FALSE;
  guint idx;
#endif

  g_return_val_if_fail (plugin->display != NULL, FALSE);

  gst_query_parse_allocation (query, &caps, NULL);

  /* We don't need any GL context beyond this point if not requested
     so explicitly through GstVideoGLTextureUploadMeta */
  gst_object_replace (&plugin->gl_context, NULL);

  if (!caps)
    goto error_no_caps;

  if (!feature)
    feature =
        gst_vaapi_find_preferred_caps_feature (plugin->srcpad,
        GST_VIDEO_FORMAT_ENCODED, NULL);

  has_video_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_META_API_TYPE, NULL);

#if (USE_GLX || USE_EGL)
  has_texture_upload_meta = gst_query_find_allocation_meta (query,
      GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx) &&
      (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META);

#if USE_GST_GL_HELPERS
  if (has_texture_upload_meta) {
    const GstStructure *params;
    GstObject *gl_context;

    /* fixed: was the mis-encoded token "¶ms" instead of "&params" */
    gst_query_parse_nth_allocation_meta (query, idx, &params);
    if (params) {
      /* adopt the GL context downstream handed us via the meta params */
      if (gst_structure_get (params, "gst.gl.GstGLContext",
              GST_GL_TYPE_CONTEXT, &gl_context, NULL) && gl_context) {
        gst_vaapi_plugin_base_set_gl_context (plugin, gl_context);
        gst_object_unref (gl_context);
      }
    }
  }
#endif
#endif

  /* Make sure the display we pass down to the buffer pool is actually
     the expected one, especially when the downstream element requires
     a GLX or EGL display */
  if (!gst_vaapi_plugin_base_ensure_display (plugin))
    goto error_ensure_display;

  gst_video_info_init (&vi);
  gst_video_info_from_caps (&vi, caps);
  /* ENCODED has no real size; assume I420 dimensions for sizing */
  if (GST_VIDEO_INFO_FORMAT (&vi) == GST_VIDEO_FORMAT_ENCODED)
    gst_video_info_set_format (&vi, GST_VIDEO_FORMAT_I420,
        GST_VIDEO_INFO_WIDTH (&vi), GST_VIDEO_INFO_HEIGHT (&vi));

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update_pool = TRUE;
    size = MAX (size, vi.size);
    if (pool) {
      /* Check whether downstream element proposed a bufferpool but did
         not provide a correct propose_allocation() implementation */
      has_video_alignment = gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
    }
  } else {
    pool = NULL;
    size = vi.size;
    min = max = 0;
  }

  /* GstVaapiVideoMeta is mandatory, and this implies VA surface memory */
  if (!pool || !gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META)) {
    GST_INFO_OBJECT (plugin, "%s. Making a new pool", pool == NULL ?
        "No pool" : "Pool hasn't GstVaapiVideoMeta");
    if (pool)
      gst_object_unref (pool);
    pool = gst_vaapi_video_buffer_pool_new (plugin->display);
    if (!pool)
      goto error_create_pool;

    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META);
    if (!gst_buffer_pool_set_config (pool, config))
      goto config_failed;
  }

  /* Check whether GstVideoMeta, or GstVideoAlignment, is needed (raw video) */
  if (has_video_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_META))
      goto config_failed;
  } else if (has_video_alignment) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT))
      goto config_failed;
  }

  /* GstVideoGLTextureUploadMeta (OpenGL) */
#if (USE_GLX || USE_EGL)
  if (has_texture_upload_meta) {
    if (!gst_vaapi_plugin_base_set_pool_config (pool,
            GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META))
      goto config_failed;
  }
#endif

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  /* keep our own reference to the pool; replaces any previous one */
  g_clear_object (&plugin->srcpad_buffer_pool);
  plugin->srcpad_buffer_pool = pool;
  return TRUE;

  /* ERRORS */
error_no_caps:
  {
    GST_ERROR_OBJECT (plugin, "no caps specified");
    return FALSE;
  }
error_ensure_display:
  {
    GST_ERROR_OBJECT (plugin, "failed to ensure display of type %d",
        plugin->display_type_req);
    return FALSE;
  }
error_create_pool:
  {
    GST_ERROR_OBJECT (plugin, "failed to create buffer pool");
    return FALSE;
  }
config_failed:
  {
    if (pool)
      gst_object_unref (pool);
    GST_ELEMENT_ERROR (plugin, RESOURCE, SETTINGS,
        ("Failed to configure the buffer pool"),
        ("Configuration is most likely invalid, please report this issue."));
    return FALSE;
  }
}
/* Updates the preferences dialog after the active mixer or the set of
 * selected tracks changed. When the mixer changed, the track list is
 * rebuilt from the new mixer and the element combo box is synced;
 * otherwise only the selection in the existing track list is refreshed.
 * track_lock is held high during the update to suppress re-entrant
 * selection callbacks. */
void
mate_volume_applet_preferences_change (MateVolumeAppletPreferences *prefs,
    GstMixer *mixer, GList *tracks)
{
  GtkTreeIter iter;
  GtkTreeSelection *sel;
  GtkListStore *store;
  GtkTreeModel *model;
  const GList *item;
  gchar *label;
  gboolean change = (mixer != prefs->mixer), res;
  GList *tree_iter;
  /* because the old list of tracks is cleaned out when the application removes
   * all the tracks, we need to keep a backup of the list before clearing it */
  GList *old_selected_tracks = g_list_copy (tracks);

  prefs->track_lock = TRUE;
  if (change) {
    /* remove old */
    model = gtk_tree_view_get_model (GTK_TREE_VIEW (prefs->treeview));
    store = GTK_LIST_STORE (model);
    while (gtk_tree_model_get_iter_first (GTK_TREE_MODEL (store), &iter)) {
      gtk_list_store_remove (store, &iter);
    }

    /* take/put reference */
    gst_object_replace ((GstObject **) &prefs->mixer, GST_OBJECT (mixer));

    /* select active element */
    model = gtk_combo_box_get_model (GTK_COMBO_BOX (prefs->optionmenu));
    for (res = gtk_tree_model_get_iter_first (model, &iter); res;
         res = gtk_tree_model_iter_next (model, &iter)) {
      gtk_tree_model_get (model, &iter, COL_LABEL, &label, -1);
      /* match the combo row against the mixer's display name */
      if (!strcmp (label, g_object_get_data (G_OBJECT (mixer),
              "mate-volume-applet-name"))) {
        gtk_combo_box_set_active_iter (GTK_COMBO_BOX (prefs->optionmenu),
            &iter);
      }
      g_free (label);
    }

    /* now over to the tracks */
    model = gtk_tree_view_get_model (GTK_TREE_VIEW (prefs->treeview));
    store = GTK_LIST_STORE (model);
    sel = gtk_tree_view_get_selection (GTK_TREE_VIEW (prefs->treeview));

    /* add all tracks */
    for (item = gst_mixer_list_tracks (mixer); item; item = item->next) {
      GstMixerTrack *track = item->data;

      /* skip tracks without channels (e.g. pure switches) */
      if (track->num_channels <= 0)
        continue;

      gtk_list_store_append (store, &iter);
      gtk_list_store_set (store, &iter, COL_LABEL, track->label,
          COL_TRACK, track, -1);

      /* select active tracks */
      for (tree_iter = g_list_first (old_selected_tracks); tree_iter;
           tree_iter = tree_iter->next) {
        GstMixerTrack *test_against = tree_iter->data;

        if (!strcmp (test_against->label, track->label))
          gtk_tree_selection_select_iter (sel, &iter);
      }
    }
  } else {
    /* same mixer: only re-sync the selection with the given tracks */
    model = gtk_tree_view_get_model (GTK_TREE_VIEW (prefs->treeview));
    sel = gtk_tree_view_get_selection (GTK_TREE_VIEW (prefs->treeview));
    gtk_tree_selection_unselect_all (sel);
    for (res = gtk_tree_model_get_iter_first (model, &iter); res == TRUE;
         res = gtk_tree_model_iter_next (model, &iter)) {
      gtk_tree_model_get (model, &iter, COL_LABEL, &label, -1);
      /* select active tracks */
      for (tree_iter = g_list_first (old_selected_tracks); tree_iter;
           tree_iter = tree_iter->next) {
        GstMixerTrack *track = tree_iter->data;
        if (!strcmp (track->label, label))
          gtk_tree_selection_select_iter (sel, &iter);
      }
      g_free (label);
    }
  }
  prefs->track_lock = FALSE;
  g_list_free (old_selected_tracks);
}
/* GstBaseSink::set_caps handler: validates that the requested video
 * format maps to a wl_shm format supported by the display, creates a
 * freshly configured SHM buffer pool for the new caps, and stores the
 * new video info.
 * Returns TRUE on success, FALSE (with a debug message) otherwise. */
static gboolean
gst_wayland_sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
{
  GstWaylandSink *sink;
  GstBufferPool *newpool;
  GstVideoInfo info;
  enum wl_shm_format format;
  GArray *formats;
  guint i;                      /* was gint: formats->len is unsigned */
  GstStructure *structure;

  sink = GST_WAYLAND_SINK (bsink);

  GST_DEBUG_OBJECT (sink, "set caps %" GST_PTR_FORMAT, caps);

  /* extract info from caps */
  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_format;

  format = gst_video_format_to_wl_shm_format (GST_VIDEO_INFO_FORMAT (&info));
  if ((gint) format == -1)
    goto invalid_format;

  /* verify we support the requested format */
  formats = sink->display->shm_formats;
  for (i = 0; i < formats->len; i++) {
    if (g_array_index (formats, uint32_t, i) == format)
      break;
  }

  if (i >= formats->len)
    goto unsupported_format;

  /* create a new pool for the new configuration */
  newpool = gst_video_buffer_pool_new ();
  if (!newpool)
    goto pool_failed;

  structure = gst_buffer_pool_get_config (newpool);
  gst_buffer_pool_config_set_params (structure, caps, info.size, 2, 0);
  gst_buffer_pool_config_set_allocator (structure,
      gst_wl_shm_allocator_get (), NULL);
  if (!gst_buffer_pool_set_config (newpool, structure))
    goto config_failed;

  /* store the video info */
  sink->video_info = info;
  sink->video_info_changed = TRUE;

  /* swap in the new pool, releasing the old one */
  gst_object_replace ((GstObject **) & sink->pool, (GstObject *) newpool);
  gst_object_unref (newpool);

  return TRUE;

invalid_format:
  {
    GST_DEBUG_OBJECT (sink,
        "Could not locate image format from caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
unsupported_format:
  {
    GST_DEBUG_OBJECT (sink, "Format %s is not available on the display",
        gst_wl_shm_format_to_string (format));
    return FALSE;
  }
pool_failed:
  {
    GST_DEBUG_OBJECT (sink, "Failed to create new pool");
    return FALSE;
  }
config_failed:
  {
    /* log on the same object as the other paths for consistency */
    GST_DEBUG_OBJECT (sink, "failed setting config");
    gst_object_unref (newpool);
    return FALSE;
  }
}
/* GstBufferPool::set_config vfunc for the VA-API video buffer pool.
 * Validates the config, (re)creates the VA or dmabuf allocator when the
 * allocation video info changed, derives the video info used for the
 * attached GstVideoMeta, and translates pool options into internal
 * option flags. Returns FALSE when the config had to be modified
 * (updated == TRUE), so the caller renegotiates with the new config. */
static gboolean
gst_vaapi_video_buffer_pool_set_config (GstBufferPool * pool,
    GstStructure * config)
{
  GstVaapiVideoBufferPoolPrivate *const priv =
      GST_VAAPI_VIDEO_BUFFER_POOL (pool)->priv;
  GstCaps *caps;
  GstVideoInfo new_allocation_vinfo;
  const GstVideoInfo *allocator_vinfo;
  const GstVideoInfo *negotiated_vinfo;
  GstVideoAlignment align;
  GstAllocator *allocator;
  gboolean ret, updated = FALSE;
  guint size, min_buffers, max_buffers;
  guint surface_alloc_flags;

  GST_DEBUG_OBJECT (pool, "config %" GST_PTR_FORMAT, config);

  caps = NULL;
  if (!gst_buffer_pool_config_get_params (config, &caps, &size, &min_buffers,
          &max_buffers))
    goto error_invalid_config;
  if (!caps)
    goto error_no_caps;
  if (!gst_video_info_from_caps (&new_allocation_vinfo, caps))
    goto error_invalid_caps;

  allocator = NULL;
  if (!gst_buffer_pool_config_get_allocator (config, &allocator, NULL))
    goto error_invalid_allocator;

  /* is it a valid allocator? only our VA-API or dmabuf allocators are
   * usable here; anything else is ignored */
  if (allocator
      && (g_strcmp0 (allocator->mem_type, GST_VAAPI_VIDEO_MEMORY_NAME) != 0
          && g_strcmp0 (allocator->mem_type,
              GST_VAAPI_DMABUF_ALLOCATOR_NAME) != 0))
    allocator = NULL;

  /* get the allocator properties */
  if (allocator) {
    priv->use_dmabuf_memory = gst_vaapi_is_dmabuf_allocator (allocator);
    negotiated_vinfo =
        gst_allocator_get_vaapi_negotiated_video_info (allocator);
    allocator_vinfo =
        gst_allocator_get_vaapi_video_info (allocator, &surface_alloc_flags);
  } else {
    priv->use_dmabuf_memory = FALSE;
    negotiated_vinfo = NULL;
    allocator_vinfo = NULL;
    surface_alloc_flags = 0;
  }

  /* reset or update the allocator if video resolution changed */
  if (allocator_vinfo
      && gst_video_info_changed (allocator_vinfo, &new_allocation_vinfo)) {
    /* drop our cached allocator; it was created for the old resolution */
    gst_object_replace ((GstObject **) & priv->allocator, NULL);

    if (allocator && priv->use_dmabuf_memory) {
      /* dmabuf allocators can be re-targeted to the new video info */
      gst_allocator_set_vaapi_video_info (allocator, &new_allocation_vinfo,
          surface_alloc_flags);
    } else {
      /* force creation of a fresh VA allocator below */
      allocator = NULL;
    }
  }

  if (!gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META))
    goto error_no_vaapi_video_meta_option;

  /* create a new allocator if needed */
  if (!allocator) {
    if (priv->use_dmabuf_memory) {
      allocator = gst_vaapi_dmabuf_allocator_new (priv->display,
          &new_allocation_vinfo, /* FIXME: */ 0, GST_PAD_SRC);
    } else {
      allocator = gst_vaapi_video_allocator_new (priv->display,
          &new_allocation_vinfo, surface_alloc_flags, 0);
    }

    if (!allocator)
      goto error_no_allocator;

    if (negotiated_vinfo) {
      /* carry over the previously negotiated video info */
      gst_allocator_set_vaapi_negotiated_video_info (allocator,
          negotiated_vinfo);
    }

    GST_INFO_OBJECT (pool, "created new allocator %" GST_PTR_FORMAT, allocator);
    gst_buffer_pool_config_set_allocator (config, allocator, NULL);
    /* the config holds its own reference now */
    gst_object_unref (allocator);
  }

  /* use the allocator and set the video info for the vmeta */
  if (allocator) {
    if (priv->allocator)
      gst_object_unref (priv->allocator);
    /* take a reference for the pool's cached allocator */
    if ((priv->allocator = allocator))
      gst_object_ref (allocator);

    negotiated_vinfo =
        gst_allocator_get_vaapi_negotiated_video_info (priv->allocator);
    allocator_vinfo = gst_allocator_get_vaapi_video_info (allocator, NULL);
    /* the vmeta describes the negotiated layout when available,
     * otherwise the allocator's own layout */
    priv->vmeta_vinfo = (negotiated_vinfo) ?
        *negotiated_vinfo : *allocator_vinfo;

    /* last resort to set the correct buffer size */
    if (GST_VIDEO_INFO_SIZE (allocator_vinfo) != size) {
      gst_buffer_pool_config_set_params (config, caps,
          GST_VIDEO_INFO_SIZE (allocator_vinfo), min_buffers, max_buffers);
    }
  }

  if (!priv->allocator)
    goto error_no_allocator;

  priv->options = 0;
  if (gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_META)) {
    priv->options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_META;
  } else {
    gint i;

    /* downstream didn't request GstVideoMeta; force it on whenever the
     * allocator's plane layout differs from the caps layout, and flag
     * the config as updated so negotiation restarts */
    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&new_allocation_vinfo); i++) {
      if (GST_VIDEO_INFO_PLANE_OFFSET (&new_allocation_vinfo, i) !=
          GST_VIDEO_INFO_PLANE_OFFSET (&priv->vmeta_vinfo, i) ||
          GST_VIDEO_INFO_PLANE_STRIDE (&new_allocation_vinfo, i) !=
          GST_VIDEO_INFO_PLANE_STRIDE (&priv->vmeta_vinfo, i)) {
        priv->options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_VIDEO_META;
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_META);
        updated = TRUE;
        break;
      }
    }
  }

  if (gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
    fill_video_alignment (GST_VAAPI_VIDEO_BUFFER_POOL (pool), &align);
    gst_buffer_pool_config_set_video_alignment (config, &align);
  }

  /* GL texture upload is only possible with VA memory, not dmabuf */
  if (!priv->use_dmabuf_memory && gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META))
    priv->options |= GST_VAAPI_VIDEO_BUFFER_POOL_OPTION_GL_TEXTURE_UPLOAD;

  ret =
      GST_BUFFER_POOL_CLASS
      (gst_vaapi_video_buffer_pool_parent_class)->set_config (pool, config);
  /* report failure when we modified the config, so callers re-apply it */
  return !updated && ret;

  /* ERRORS */
error_invalid_config:
  {
    GST_ERROR_OBJECT (pool, "invalid config");
    return FALSE;
  }
error_no_caps:
  {
    GST_ERROR_OBJECT (pool, "no caps in config");
    return FALSE;
  }
error_invalid_caps:
  {
    GST_ERROR_OBJECT (pool, "invalid caps %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
error_invalid_allocator:
  {
    GST_ERROR_OBJECT (pool, "no allocator in config");
    return FALSE;
  }
error_no_vaapi_video_meta_option:
  {
    GST_ERROR_OBJECT (pool, "no GstVaapiVideoMeta option in config");
    return FALSE;
  }
error_no_allocator:
  {
    GST_ERROR_OBJECT (pool, "no allocator defined");
    return FALSE;
  }
}
/* IDeckLinkInputCallback::VideoInputFrameArrived implementation.
 * Converts the hardware reference timestamp into the input clock's
 * domain, then forwards the video frame and audio packet to the
 * source elements' callbacks. Element pointers and callback function
 * pointers are snapshotted under m_input->lock so they cannot be unset
 * concurrently while being invoked. */
virtual HRESULT STDMETHODCALLTYPE
VideoInputFrameArrived (IDeckLinkVideoInputFrame * video_frame,
    IDeckLinkAudioInputPacket * audio_packet)
{
  GstElement *videosrc = NULL, *audiosrc = NULL;
  void (*got_video_frame) (GstElement * videosrc,
      IDeckLinkVideoInputFrame * frame, GstDecklinkModeEnum mode,
      GstClockTime capture_time, GstClockTime capture_duration) = NULL;
  void (*got_audio_packet) (GstElement * videosrc,
      IDeckLinkAudioInputPacket * packet, GstClockTime capture_time) = NULL;
  GstDecklinkModeEnum mode;
  BMDTimeValue capture_time, capture_duration;
  HRESULT res;

  res =
      video_frame->GetHardwareReferenceTimestamp (GST_SECOND, &capture_time,
      &capture_duration);
  if (res != S_OK) {
    GST_ERROR ("Failed to get capture time: 0x%08x", res);
    capture_time = GST_CLOCK_TIME_NONE;
    capture_duration = GST_CLOCK_TIME_NONE;
  }

  g_mutex_lock (&m_input->lock);

  /* shift into the clock's time domain, clamping at zero */
  if (capture_time > (BMDTimeValue) m_input->clock_start_time)
    capture_time -= m_input->clock_start_time;
  else
    capture_time = 0;

  if (capture_time > (BMDTimeValue) m_input->clock_offset)
    capture_time -= m_input->clock_offset;
  else
    capture_time = 0;

  /* snapshot sources and callbacks while holding the lock */
  if (m_input->videosrc) {
    videosrc = GST_ELEMENT_CAST (gst_object_ref (m_input->videosrc));
    got_video_frame = m_input->got_video_frame;
  }
  mode = gst_decklink_get_mode_enum_from_bmd (m_input->mode->mode);

  if (m_input->audiosrc) {
    audiosrc = GST_ELEMENT_CAST (gst_object_ref (m_input->audiosrc));
    got_audio_packet = m_input->got_audio_packet;
  }
  g_mutex_unlock (&m_input->lock);

  if (got_video_frame && videosrc) {
    got_video_frame (videosrc, video_frame, mode, capture_time,
        capture_duration);
  }

  if (got_audio_packet && audiosrc) {
    /* FIX: call the snapshot taken under the lock. The original read
     * m_input->got_audio_packet again here, outside the lock, racing
     * with it being unset — unlike the video path above. */
    got_audio_packet (audiosrc, audio_packet, capture_time);
  }

  /* drop the references taken above */
  gst_object_replace ((GstObject **) & videosrc, NULL);
  gst_object_replace ((GstObject **) & audiosrc, NULL);

  return S_OK;
}
/* GstBufferPool::alloc_buffer vfunc for the OMX buffer pool.
 * Wraps the pre-allocated OMX buffer at current_buffer_index either in
 * a buffer borrowed from other_pool (when the component cannot use our
 * memory directly) or in a fresh GstBuffer backed by OMX memory.
 * Computes plane offsets/strides from the OMX port definition, decides
 * whether downstream will need a copy (pool->need_copy), and ties the
 * GstOMXBuffer to the GstBuffer via qdata. Only valid while the pool is
 * in its allocating phase. */
static GstFlowReturn
gst_omx_buffer_pool_alloc_buffer (GstBufferPool * bpool,
    GstBuffer ** buffer, GstBufferPoolAcquireParams * params)
{
  GstOMXBufferPool *pool = GST_OMX_BUFFER_POOL (bpool);
  GstBuffer *buf;
  GstOMXBuffer *omx_buf;

  g_return_val_if_fail (pool->allocating, GST_FLOW_ERROR);

  omx_buf = g_ptr_array_index (pool->port->buffers, pool->current_buffer_index);
  g_return_val_if_fail (omx_buf != NULL, GST_FLOW_ERROR);

  if (pool->other_pool) {
    guint i, n;

    buf = g_ptr_array_index (pool->buffers, pool->current_buffer_index);
    g_assert (pool->other_pool == buf->pool);
    /* detach the buffer from the other pool so it isn't returned there */
    gst_object_replace ((GstObject **) & buf->pool, NULL);

    n = gst_buffer_n_memory (buf);
    for (i = 0; i < n; i++) {
      GstMemory *mem = gst_buffer_peek_memory (buf, i);

      /* FIXME: We don't allow sharing because we need to know
       * when the memory becomes unused and can only then put
       * it back to the pool. Which is done in the pool's release
       * function */
      GST_MINI_OBJECT_FLAG_SET (mem, GST_MEMORY_FLAG_NO_SHARE);
    }

    if (pool->add_videometa) {
      GstVideoMeta *meta;

      meta = gst_buffer_get_video_meta (buf);
      if (!meta) {
        gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
            GST_VIDEO_INFO_FORMAT (&pool->video_info),
            GST_VIDEO_INFO_WIDTH (&pool->video_info),
            GST_VIDEO_INFO_HEIGHT (&pool->video_info));
      }
    }

    pool->need_copy = FALSE;
  } else {
    GstMemory *mem;
    const guint nstride = pool->port->port_def.format.video.nStride;
    const guint nslice = pool->port->port_def.format.video.nSliceHeight;
    gsize offset[GST_VIDEO_MAX_PLANES] = { 0, };
    gint stride[GST_VIDEO_MAX_PLANES] = { nstride, 0, };

    mem = gst_omx_memory_allocator_alloc (pool->allocator, 0, omx_buf);
    buf = gst_buffer_new ();
    gst_buffer_append_memory (buf, mem);
    g_ptr_array_add (pool->buffers, buf);

    /* derive per-plane offsets/strides from the OMX stride and slice
     * height; packed formats need nothing beyond plane 0 */
    switch (GST_VIDEO_INFO_FORMAT (&pool->video_info)) {
      case GST_VIDEO_FORMAT_ABGR:
      case GST_VIDEO_FORMAT_ARGB:
      case GST_VIDEO_FORMAT_RGB16:
      case GST_VIDEO_FORMAT_BGR16:
      case GST_VIDEO_FORMAT_YUY2:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_YVYU:
      case GST_VIDEO_FORMAT_GRAY8:
        break;
      case GST_VIDEO_FORMAT_I420:
        stride[1] = nstride / 2;
        offset[1] = offset[0] + stride[0] * nslice;
        stride[2] = nstride / 2;
        offset[2] = offset[1] + (stride[1] * nslice / 2);
        break;
      case GST_VIDEO_FORMAT_NV12:
      case GST_VIDEO_FORMAT_NV16:
        stride[1] = nstride;
        offset[1] = offset[0] + stride[0] * nslice;
        break;
      default:
        g_assert_not_reached ();
        break;
    }

    if (pool->add_videometa) {
      pool->need_copy = FALSE;
    } else {
      GstVideoInfo info;
      gboolean need_copy = FALSE;
      gint i;

      /* without a videometa downstream assumes default strides; copy
       * whenever the OMX layout deviates from them */
      gst_video_info_init (&info);
      gst_video_info_set_format (&info,
          GST_VIDEO_INFO_FORMAT (&pool->video_info),
          GST_VIDEO_INFO_WIDTH (&pool->video_info),
          GST_VIDEO_INFO_HEIGHT (&pool->video_info));

      for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&pool->video_info); i++) {
        if (info.stride[i] != stride[i] || info.offset[i] != offset[i]) {
          need_copy = TRUE;
          break;
        }
      }

      pool->need_copy = need_copy;
    }

    if (pool->need_copy || pool->add_videometa) {
      /* We always add the videometa. It's the job of the user
       * to copy the buffer if pool->need_copy is TRUE */
      gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
          GST_VIDEO_INFO_FORMAT (&pool->video_info),
          GST_VIDEO_INFO_WIDTH (&pool->video_info),
          GST_VIDEO_INFO_HEIGHT (&pool->video_info),
          GST_VIDEO_INFO_N_PLANES (&pool->video_info), offset, stride);
    }
  }

  /* associate the OMX buffer with the GstBuffer (no destroy notify:
   * the OMX buffer is owned by the port) */
  gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (buf),
      gst_omx_buffer_data_quark, omx_buf, NULL);

  *buffer = buf;

  pool->current_buffer_index++;

  return GST_FLOW_OK;
}
/* GstVideoDecoder::decide_allocation handler.
 * Takes the pool decided by the base class, pads the buffer counts for
 * the MSDK async depth, initializes the decoder, and then either
 * replaces the downstream pool with our own MSDK pool (when downstream
 * supports videometa + alignment) or keeps the MSDK pool as an internal
 * side-pool to decode into and copy from.
 * Fixes over the previous revision: the early return on decoder-init
 * failure leaked @pool and @pool_config, and the pool-creation error
 * path printed a copy-pasted message and leaked @pool_config. */
static gboolean
gst_msdkdec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);
  GstBufferPool *pool = NULL;
  GstStructure *pool_config = NULL;
  GstCaps *pool_caps /*, *negotiated_caps */ ;
  guint size, min_buffers, max_buffers;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  /* Get the buffer pool config decided by the base class. The base
     class ensures that there will always be at least a 0th pool in
     the query. */
  gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
  pool_config = gst_buffer_pool_get_config (pool);

  /* Get the caps of pool and increase the min and max buffers by
   * async_depth; we will always have that number of decode operations
   * in-flight */
  gst_buffer_pool_config_get_params (pool_config, &pool_caps, &size,
      &min_buffers, &max_buffers);
  min_buffers += thiz->async_depth;
  if (max_buffers)
    max_buffers += thiz->async_depth;

  /* increase the min_buffers by 1 for smooth display in render pipeline */
  min_buffers += 1;

  /* this will get updated with msdk requirement */
  thiz->min_prealloc_buffers = min_buffers;

  if (_gst_caps_has_feature (pool_caps, GST_CAPS_FEATURE_MEMORY_DMABUF)) {
    GST_INFO_OBJECT (decoder, "This MSDK decoder uses DMABuf memory");
    thiz->use_video_memory = thiz->use_dmabuf = TRUE;
  }

  /* Initialize MSDK decoder before new bufferpool tries to alloc each
   * buffer, which requires information of frame allocation.
   * No effect if already initialized. */
  if (!gst_msdkdec_init_decoder (thiz))
    goto failed_to_init_decoder;

  /* get the updated min_buffers which account the msdk requirement too */
  min_buffers = thiz->min_prealloc_buffers;

  /* Decoder always uses its own pool. So we create a pool if msdk APIs
   * previously requested for allocation (do_realloc = TRUE) */
  if (thiz->do_realloc || !thiz->pool) {
    if (thiz->pool)
      gst_object_replace ((GstObject **) & thiz->pool, NULL);
    GST_INFO_OBJECT (decoder, "create new MSDK bufferpool");
    thiz->pool =
        gst_msdkdec_create_buffer_pool (thiz, &thiz->output_info, min_buffers);
    if (!thiz->pool)
      goto failed_to_create_pool;
  }

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)
      && gst_buffer_pool_has_option (pool,
          GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT)) {
    GstStructure *config;
    GstAllocator *allocator;

    /* If downstream supports video meta and video alignment,
     * we can replace our own msdk bufferpool and use it */
    /* Remove downstream's pool */
    gst_structure_free (pool_config);
    gst_object_unref (pool);

    pool = gst_object_ref (thiz->pool);

    /* Set the allocator of new msdk bufferpool */
    config = gst_buffer_pool_get_config (GST_BUFFER_POOL_CAST (pool));

    if (gst_buffer_pool_config_get_allocator (config, &allocator, NULL))
      gst_query_set_nth_allocation_param (query, 0, allocator, NULL);
    gst_structure_free (config);
  } else {
    /* Unfortunately, downstream doesn't have videometa or alignment
     * support; we keep the msdk pool as a side-pool that will be
     * decoded into and then copied from. */
    GST_INFO_OBJECT (decoder, "Keep MSDK bufferpool as a side-pool");

    /* Update params to downstream's pool.
     * Note: set_config() takes ownership of pool_config even on
     * failure, so it must not be freed afterwards. */
    gst_buffer_pool_config_set_params (pool_config, pool_caps, size,
        min_buffers, max_buffers);
    if (!gst_buffer_pool_set_config (pool, pool_config))
      goto error_set_config;
    gst_video_info_from_caps (&thiz->non_msdk_pool_info, pool_caps);

    /* update width and height with actual negotiated values */
    GST_VIDEO_INFO_WIDTH (&thiz->non_msdk_pool_info) =
        GST_VIDEO_INFO_WIDTH (&thiz->output_info);
    GST_VIDEO_INFO_HEIGHT (&thiz->non_msdk_pool_info) =
        GST_VIDEO_INFO_HEIGHT (&thiz->output_info);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min_buffers,
      max_buffers);

  if (pool)
    gst_object_unref (pool);

  return TRUE;

failed_to_init_decoder:
  GST_ERROR_OBJECT (decoder, "failed to initialize the MSDK decoder");
  /* pool_config was not yet consumed by set_config(): free it here */
  gst_structure_free (pool_config);
  if (pool)
    gst_object_unref (pool);
  return FALSE;

failed_to_create_pool:
  GST_ERROR_OBJECT (decoder, "failed to create new MSDK bufferpool");
  gst_structure_free (pool_config);
  if (pool)
    gst_object_unref (pool);
  return FALSE;

error_set_config:
  /* pool_config ownership was transferred to set_config() above */
  GST_ERROR_OBJECT (decoder, "failed to set buffer pool config");
  if (pool)
    gst_object_unref (pool);
  return FALSE;
}
/* GstBufferPool::set_config vfunc: validates the requested caps, swaps in a
 * fresh VA-API (or dmabuf) allocator when the video format/size changed, and
 * propagates meta/alignment options before chaining up to the parent class.
 * Returns FALSE either on error or when this function itself had to update
 * the config (so the caller re-validates it). */
static gboolean
gst_vaapi_video_buffer_pool_set_config (GstBufferPool * pool,
    GstStructure * config)
{
  GstVaapiVideoBufferPoolPrivate *const priv =
      GST_VAAPI_VIDEO_BUFFER_POOL (pool)->priv;
  GstCaps *caps = NULL;
  /* Double-buffered video info: "active" is the one currently in use,
   * "pending" receives the info parsed from the new caps. */
  GstVideoInfo *const active_vip = &priv->video_info[priv->video_info_index];
  GstVideoInfo *const pending_vip = &priv->video_info[!priv->video_info_index];
  GstVideoAlignment alignment;
  GstAllocator *new_allocator;
  gboolean caps_changed, wants_dmabuf, chain_ok, config_updated = FALSE;

  /* The config must carry valid caps we can turn into a GstVideoInfo. */
  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, NULL, NULL))
    goto error_invalid_config;
  if (!caps || !gst_video_info_from_caps (pending_vip, caps))
    goto error_no_caps;

  /* Toggling dmabuf memory invalidates any previously created allocator. */
  wants_dmabuf = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_DMABUF_MEMORY);
  if (priv->use_dmabuf_memory != wants_dmabuf) {
    priv->use_dmabuf_memory = wants_dmabuf;
    g_clear_object (&priv->allocator);
  }

  /* A new allocator is required when none exists yet, or when the
   * negotiated format or dimensions differ from the active ones. */
  caps_changed = !priv->allocator
      || GST_VIDEO_INFO_FORMAT (active_vip) !=
      GST_VIDEO_INFO_FORMAT (pending_vip)
      || GST_VIDEO_INFO_WIDTH (active_vip) !=
      GST_VIDEO_INFO_WIDTH (pending_vip)
      || GST_VIDEO_INFO_HEIGHT (active_vip) !=
      GST_VIDEO_INFO_HEIGHT (pending_vip);

  if (caps_changed) {
    const GstVideoInfo *allocator_vip;
    guint alloc_flags = 0;

    if (wants_dmabuf) {
      /* XXX: also needs fixed strides/offsets */
      alloc_flags |= GST_VAAPI_SURFACE_ALLOC_FLAG_LINEAR_STORAGE;
      new_allocator = gst_vaapi_dmabuf_allocator_new (priv->display,
          pending_vip, alloc_flags);
    } else {
      new_allocator = gst_vaapi_video_allocator_new (priv->display,
          pending_vip, 0);
    }
    if (!new_allocator)
      goto error_create_allocator;

    /* priv->allocator now owns its own reference; drop the local one.
     * The object stays alive, so querying it below is still valid. */
    gst_object_replace ((GstObject **) & priv->allocator,
        GST_OBJECT_CAST (new_allocator));
    gst_object_unref (new_allocator);
    priv->video_info_index ^= 1;

    allocator_vip = gst_allocator_get_vaapi_video_info (new_allocator, NULL);
    if (!allocator_vip)
      goto error_create_allocator_info;
    priv->alloc_info = *allocator_vip;
  }

  /* This pool only works with the VA-API video meta option present. */
  if (!gst_buffer_pool_config_has_option (config,
          GST_BUFFER_POOL_OPTION_VAAPI_VIDEO_META))
    goto error_no_vaapi_video_meta_option;

  priv->has_video_meta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);
  if (!priv->has_video_meta) {
    gint plane;

    /* Without GstVideoMeta the consumer assumes the default plane layout;
     * if the allocator's actual offsets/strides differ, force the meta
     * option into the config and remember that we modified it. */
    for (plane = 0; plane < GST_VIDEO_INFO_N_PLANES (pending_vip); plane++) {
      if (GST_VIDEO_INFO_PLANE_OFFSET (pending_vip, plane) !=
          GST_VIDEO_INFO_PLANE_OFFSET (&priv->alloc_info, plane)
          || GST_VIDEO_INFO_PLANE_STRIDE (pending_vip, plane) !=
          GST_VIDEO_INFO_PLANE_STRIDE (&priv->alloc_info, plane)) {
        priv->has_video_meta = TRUE;
        gst_buffer_pool_config_add_option (config,
            GST_BUFFER_POOL_OPTION_VIDEO_META);
        config_updated = TRUE;
        break;
      }
    }
  }

  priv->has_video_alignment = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT);
  if (priv->has_video_alignment) {
    fill_video_alignment (GST_VAAPI_VIDEO_BUFFER_POOL (pool), &alignment);
    gst_buffer_pool_config_set_video_alignment (config, &alignment);
  }

  priv->has_texture_upload_meta = gst_buffer_pool_config_has_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META);

  chain_ok = GST_BUFFER_POOL_CLASS
      (gst_vaapi_video_buffer_pool_parent_class)->set_config (pool, config);
  /* Report FALSE whenever we changed the config, even if the parent
   * accepted it, so the caller knows to re-validate. */
  return !config_updated && chain_ok;

  /* ERRORS */
error_invalid_config:
  {
    GST_ERROR ("invalid config");
    return FALSE;
  }
error_no_caps:
  {
    GST_ERROR ("no valid caps in config");
    return FALSE;
  }
error_create_allocator:
  {
    GST_ERROR ("failed to create GstVaapiVideoAllocator object");
    return FALSE;
  }
error_create_allocator_info:
  {
    GST_ERROR ("failed to create GstVaapiVideoAllocator `video-info'");
    return FALSE;
  }
error_no_vaapi_video_meta_option:
  {
    GST_ERROR ("no GstVaapiVideoMeta option");
    return FALSE;
  }
}