static gboolean
sync_bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_NEED_CONTEXT:
    {
      const gchar *context_type;

      gst_message_parse_context_type (msg, &context_type);
      g_print ("got need context %s\n", context_type);

      if (g_strcmp0 (context_type, GST_GL_DISPLAY_CONTEXT_TYPE) == 0) {
        GstContext *display_context =
            gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display (display_context, sdl_gl_display);
        gst_element_set_context (GST_ELEMENT (msg->src), display_context);
        return TRUE;
      } else if (g_strcmp0 (context_type, "gst.gl.app_context") == 0) {
        GstContext *app_context = gst_context_new ("gst.gl.app_context", TRUE);
        GstStructure *s = gst_context_writable_structure (app_context);
        gst_structure_set (s, "context", GST_GL_TYPE_CONTEXT, sdl_context, NULL);
        gst_element_set_context (GST_ELEMENT (msg->src), app_context);
        return TRUE;
      }
      break;
    }
    default:
      break;
  }
  return FALSE;
}
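The handler above answers GST_MESSAGE_NEED_CONTEXT with the application's SDL GL display and context. How it gets attached is not shown; a minimal sketch, assuming it is wired up through the bus's synchronous message emission so the reply happens before the posting element continues (the wrapper function itself is illustrative, not part of the example):

static void
attach_context_handler (GstElement * pipeline)
{
  GstBus *bus = gst_element_get_bus (pipeline);

  /* "sync-message" is only emitted once sync emission is enabled; the
   * callback then runs in the thread that posted the message. */
  gst_bus_enable_sync_message_emission (bus);
  g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), NULL);
  gst_object_unref (bus);
}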
void MediaPlayerPrivateGStreamerBase::handleNeedContextMessage(GstMessage* message)
{
#if USE(GSTREAMER_GL)
    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);

    if (!ensureGstGLContext())
        return;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, m_glDisplay.get());
        gst_element_set_context(GST_ELEMENT(message->src), displayContext);
        return;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_glContext.get(), nullptr);
        gst_element_set_context(GST_ELEMENT(message->src), appContext);
        return;
    }
#else
    UNUSED_PARAM(message);
#endif // USE(GSTREAMER_GL)
}
GstQuery *
gst_vulkan_local_context_query (GstElement * element,
    const gchar * context_type, gboolean set_context)
{
  GstQuery *query;
  GstContext *ctxt;

  _init_context_debug ();

  /* 2a) Query downstream with GST_QUERY_CONTEXT for the context and
   *     check if downstream already has a context of the specific type
   * 2b) Query upstream as above.
   */
  query = gst_query_new_context (context_type);
  if (gst_vulkan_run_query (element, query, GST_PAD_SRC)) {
    gst_query_parse_context (query, &ctxt);
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "found context (%p) in downstream query", ctxt);
    if (set_context)
      gst_element_set_context (element, ctxt);
  } else if (gst_vulkan_run_query (element, query, GST_PAD_SINK)) {
    gst_query_parse_context (query, &ctxt);
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "found context (%p) in upstream query", ctxt);
    if (set_context)
      gst_element_set_context (element, ctxt);
  } else {
    gst_query_unref (query);
    query = NULL;
  }

  return query;
}
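A caller typically tries this query helper first and, if neither peer has a context, falls back to posting GST_MESSAGE_NEED_CONTEXT so parent bins or the application can supply one. A hedged sketch of such a caller (the wrapper name is illustrative; the helper signature is the one defined above):

static void
query_or_request_context (GstElement * element, const gchar * context_type)
{
  GstQuery *query;

  /* Peers first: the helper already calls gst_element_set_context()
   * when set_context is TRUE and a context was found. */
  query = gst_vulkan_local_context_query (element, context_type, TRUE);
  if (query) {
    gst_query_unref (query);
    return;
  }

  /* Nobody nearby had one: ask bins and the application via the bus. */
  gst_element_post_message (element,
      gst_message_new_need_context (GST_OBJECT_CAST (element), context_type));
}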
static void
test_audio_context_configures_buffer_size (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *p =
      gst_parse_launch
      ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false",
      NULL);
  BtTestAudioSynth *e =
      (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src");
  GstBus *bus = gst_element_get_bus (p);

  GST_INFO ("-- act --");
  gst_element_set_state (p, GST_STATE_READY);
  gst_element_set_context (p, ctx);
  gst_element_set_state (p, GST_STATE_PLAYING);
  gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  BufferFields *bf = get_buffer_info (e, 0);
  // sizeof(gint16) * (int)(0.5 + (44100 * (60.0 / 8)) / (120 * 4))
  ck_assert_uint_eq (bf->size, 1378);

  GST_INFO ("-- cleanup --");
  gst_element_set_state (p, GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (e);
  gst_object_unref (p);
  BT_TEST_END;
}
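The test sets a `ctx` that is presumably created by the test fixture before it is handed to the pipeline in READY. As a rough illustration of how an application-provided context like that can be built with the core API alone, here is a hedged sketch; the "some.audio.context" type string and the "sample-rate" field are invented for the sketch and are not what the Buzztrax fixture actually uses:

static GstContext *
make_app_context (void)
{
  GstContext *context = gst_context_new ("some.audio.context", TRUE);
  GstStructure *s = gst_context_writable_structure (context);

  /* Fill the structure with whatever the elements expect to read back
   * from the context in their set_context() implementation. */
  gst_structure_set (s, "sample-rate", G_TYPE_INT, 44100, NULL);

  return context;
}

Such a context is then passed to gst_element_set_context() on the pipeline before it reaches PLAYING, as the test above does with `ctx`.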
static void
_gst_context_query (GstElement * element, const gchar * display_type)
{
  GstQuery *query;
  GstContext *ctxt;

  _init_context_debug ();

  /* 2a) Query downstream with GST_QUERY_CONTEXT for the context and
   *     check if downstream already has a context of the specific type
   * 2b) Query upstream as above.
   */
  query = gst_query_new_context (display_type);
  if (gst_gl_run_query (element, query, GST_PAD_SRC)) {
    gst_query_parse_context (query, &ctxt);
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "found context (%p) in downstream query", ctxt);
    gst_element_set_context (element, ctxt);
  } else if (gst_gl_run_query (element, query, GST_PAD_SINK)) {
    gst_query_parse_context (query, &ctxt);
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "found context (%p) in upstream query", ctxt);
    gst_element_set_context (element, ctxt);
  } else {
    /* 3) Post a GST_MESSAGE_NEED_CONTEXT message on the bus with
     *    the required context type and afterwards check if a
     *    usable context was set now as in 1). The message could
     *    be handled by the parent bins of the element and the
     *    application.
     */
    GstMessage *msg;

    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "posting need context message");
    msg = gst_message_new_need_context (GST_OBJECT_CAST (element),
        display_type);
    gst_element_post_message (element, msg);
  }

  /*
   * Whomever responds to the need-context message performs a
   * GstElement::set_context() with the required context in which the element
   * is required to update the display_ptr or call gst_gl_handle_set_context().
   */

  gst_query_unref (query);
}
static GstStateChangeReturn
gst_context_element_change_state (GstElement * element,
    GstStateChange transition)
{
  GstContextElement *celement = (GstContextElement *) element;

  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
    GstContext *context;
    GstMessage *msg;
    gboolean have_foobar = celement->have_foobar;

    if (celement->set_before_ready && !have_foobar)
      return GST_STATE_CHANGE_FAILURE;
    else if (celement->set_before_ready)
      return
          GST_ELEMENT_CLASS (gst_context_element_parent_class)->change_state
          (element, transition);

    if (celement->set_from_need_context && have_foobar)
      return GST_STATE_CHANGE_FAILURE;

    if (!have_foobar) {
      /* Here we would first query downstream for a context but we have
       * no pads */
      msg = gst_message_new_need_context (GST_OBJECT (element), "foobar");
      gst_element_post_message (element, msg);

      have_foobar = celement->have_foobar;
    }

    if (celement->set_from_need_context && !have_foobar)
      return GST_STATE_CHANGE_FAILURE;
    else if (celement->set_from_need_context)
      return
          GST_ELEMENT_CLASS (gst_context_element_parent_class)->change_state
          (element, transition);

    if (celement->create_self && have_foobar)
      return GST_STATE_CHANGE_FAILURE;

    if (!have_foobar) {
      context = gst_context_new ("foobar", FALSE);
      gst_element_set_context (element, context);
      msg =
          gst_message_new_have_context (GST_OBJECT (element),
          gst_context_ref (context));
      gst_element_post_message (element, msg);
      gst_context_unref (context);
    }

    return
        GST_ELEMENT_CLASS (gst_context_element_parent_class)->change_state
        (element, transition);
  }

  return
      GST_ELEMENT_CLASS (gst_context_element_parent_class)->change_state
      (element, transition);
}
/*
 * @element: (transfer none):
 * @context: (transfer full):
 */
static void
_vk_context_propagate (GstElement * element, GstContext * context)
{
  GstMessage *msg;

  _init_context_debug ();

  gst_element_set_context (element, context);

  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
      "posting have context (%" GST_PTR_FORMAT ") message", context);
  msg = gst_message_new_have_context (GST_OBJECT_CAST (element), context);
  gst_element_post_message (GST_ELEMENT_CAST (element), msg);
}
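Per the transfer annotations above, the caller hands its reference to the context over to the helper, which forwards it into the have-context message (gst_message_new_have_context() takes ownership, while gst_element_set_context() only adds a ref). A minimal sketch of a caller under that assumption; the wrapper name and the context-type argument are illustrative:

static void
propagate_new_context (GstElement * element, const gchar * context_type)
{
  GstContext *context = gst_context_new (context_type, TRUE);

  /* Ownership of the single reference passes to _vk_context_propagate(),
   * so no unref here. */
  _vk_context_propagate (element, context);
}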
/* 5) Create a context by itself and post a GST_MESSAGE_HAVE_CONTEXT
 *    message on the bus. */
void
gst_vaapi_video_context_propagate (GstElement * element,
    GstVaapiDisplay * display)
{
  GstContext *context;
  GstMessage *msg;

  context = gst_vaapi_video_context_new_with_display (display, FALSE);
  gst_element_set_context (element, context);

  _init_context_debug ();
  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
      "posting `have-context' (%p) message with display (%p)",
      context, display);
  msg = gst_message_new_have_context (GST_OBJECT_CAST (element), context);
  gst_element_post_message (GST_ELEMENT_CAST (element), msg);
}
static GstBusSyncReply
sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_NEED_CONTEXT) {
    const gchar *type;
    GstElement *element = GST_ELEMENT (GST_MESSAGE_SRC (message));
    GstContext *context;

    fail_unless (gst_message_parse_context_type (message, &type));
    fail_unless_equals_string (type, "foobar");

    context = gst_context_new ("foobar", FALSE);
    gst_element_set_context (element, context);
    gst_context_unref (context);
  }

  return GST_BUS_PASS;
}
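Because this handler returns a GstBusSyncReply, it is meant to be installed directly as the bus's sync handler rather than connected as a signal callback, so the "foobar" context is supplied in the posting thread before the element proceeds. A minimal sketch of that setup (the wrapper function is illustrative):

static void
install_sync_handler (GstElement * pipeline)
{
  GstBus *bus = gst_element_get_bus (pipeline);

  /* The sync handler runs in the thread that posts the message. */
  gst_bus_set_sync_handler (bus, sync_handler, NULL, NULL);
  gst_object_unref (bus);
}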
static gboolean
_gst_context_get_from_query (GstElement * element, GstQuery * query,
    GstPadDirection direction)
{
  GstContext *ctxt;

  if (!_gst_context_run_query (element, query, direction))
    return FALSE;

  gst_query_parse_context (query, &ctxt);
  if (!ctxt)
    return FALSE;

  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
      "found context (%" GST_PTR_FORMAT ") in %s query", ctxt,
      direction == GST_PAD_SRC ? "downstream" : "upstream");
  gst_element_set_context (element, ctxt);

  return TRUE;
}
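A caller of this helper usually checks downstream first and then upstream with the same GST_QUERY_CONTEXT query, mirroring steps 2a/2b of the lookup shown in the earlier examples. A hedged sketch of such a caller (the wrapper name is illustrative):

static gboolean
_gst_context_query_neighbours (GstElement * element,
    const gchar * context_type)
{
  GstQuery *query = gst_query_new_context (context_type);
  gboolean found;

  /* 2a) downstream, then 2b) upstream; the helper above already sets the
   * context on the element as soon as one direction answers. */
  found = _gst_context_get_from_query (element, query, GST_PAD_SRC)
      || _gst_context_get_from_query (element, query, GST_PAD_SINK);

  gst_query_unref (query);
  return found;
}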
static void
test_num_buffers_with_stop_pos (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *p =
      gst_parse_launch
      ("buzztrax-test-audio-synth name=\"src\" ! fakesink async=false", NULL);
  BtTestAudioSynth *e =
      (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src");
  GstBus *bus = gst_element_get_bus (p);
  GstSeekFlags seek_flags[] = {
    GST_SEEK_FLAG_FLUSH,
    GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SEGMENT
  };
  GstMessageType end_msg[] = { GST_MESSAGE_EOS, GST_MESSAGE_SEGMENT_DONE };

  GST_INFO ("-- act --");
  gst_element_set_state (p, GST_STATE_READY);
  gst_element_set_context (p, ctx);
  gst_element_set_state (p, GST_STATE_PAUSED);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_element_seek (p, 1.0, GST_FORMAT_TIME, seek_flags[_i],
      GST_SEEK_TYPE_SET, G_GUINT64_CONSTANT (0),
      GST_SEEK_TYPE_SET, ticktime * 2);
  gst_element_set_state (p, GST_STATE_PLAYING);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_bus_poll (bus, end_msg[_i] | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  gint num_buffers = g_list_length (e->buffer_info);
  ck_assert_uint_eq (num_buffers, 2);

  GST_INFO ("-- cleanup --");
  gst_element_set_state (p, GST_STATE_NULL);
  gst_object_unref (e);
  gst_object_unref (p);
  BT_TEST_END;
}
/* 4) Create a context by itself and post a GST_MESSAGE_HAVE_CONTEXT
 *    message.
 */
static void
gst_gl_display_context_propagate (GstElement * element, GstGLDisplay * display)
{
  GstContext *context;
  GstMessage *msg;

  if (!display) {
    GST_ERROR_OBJECT (element, "Could not get GL display connection");
    return;
  }

  _init_context_debug ();

  context = gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
  gst_context_set_gl_display (context, display);

  gst_element_set_context (element, context);

  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
      "posting have context (%p) message with display (%p)", context, display);
  msg = gst_message_new_have_context (GST_OBJECT_CAST (element), context);
  gst_element_post_message (GST_ELEMENT_CAST (element), msg);
}
static void
test_last_buffer_is_clipped (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *p =
      gst_parse_launch
      ("buzztrax-test-audio-synth name=\"src\" ! fakesink async=false", NULL);
  BtTestAudioSynth *e =
      (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src");
  GstBus *bus = gst_element_get_bus (p);

  GST_INFO ("-- act --");
  gst_element_set_state (p, GST_STATE_READY);
  gst_element_set_context (p, ctx);
  gst_element_set_state (p, GST_STATE_PAUSED);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_element_seek (p, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, G_GUINT64_CONSTANT (0),
      GST_SEEK_TYPE_SET, ticktime * 1.5);
  gst_element_set_state (p, GST_STATE_PLAYING);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  BufferFields *bf0 = get_buffer_info (e, 0);
  BufferFields *bf1 = get_buffer_info (e, 1);
  ck_assert_uint64_le (bf1->duration, bf0->duration / 2);
  ck_assert_uint_le (bf1->size, bf0->size / 2);

  GST_INFO ("-- cleanup --");
  gst_element_set_state (p, GST_STATE_NULL);
  gst_object_unref (e);
  gst_object_unref (p);
  BT_TEST_END;
}
static void
test_backwards_last_buffer_ends_at_zero (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *p =
      gst_parse_launch
      ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false",
      NULL);
  BtTestAudioSynth *e =
      (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src");
  GstBus *bus = gst_element_get_bus (p);

  GST_INFO ("-- act --");
  gst_element_set_state (p, GST_STATE_READY);
  gst_element_set_context (p, ctx);
  gst_element_set_state (p, GST_STATE_PAUSED);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_element_seek (p, -1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, G_GUINT64_CONSTANT (0),
      GST_SEEK_TYPE_SET, ticktime);
  gst_element_set_state (p, GST_STATE_PLAYING);
  gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  BufferFields *bf = get_buffer_info (e, 0);
  ck_assert_uint64_eq (bf->ts, 0);
  ck_assert_uint64_eq (bf->offset, 0);

  GST_INFO ("-- cleanup --");
  gst_element_set_state (p, GST_STATE_NULL);
  gst_object_unref (e);
  gst_object_unref (p);
  BT_TEST_END;
}