void Pipeline::configure() { gst_init (NULL, NULL); #ifdef Q_WS_WIN m_loop = g_main_loop_new (NULL, FALSE); #endif if(m_videoLocation.isEmpty()) { qDebug("No video file specified. Using video test source."); m_pipeline = GST_PIPELINE (gst_parse_launch ("videotestsrc ! " "video/x-raw-yuv, width=640, height=480, " "framerate=(fraction)30/1 ! " "glupload ! gleffects effect=5 ! fakesink sync=1", NULL)); } else { qDebug("Loading video: %s", m_videoLocation.toAscii().data()); m_pipeline = GST_PIPELINE (gst_parse_launch (QString("filesrc location=%1 ! decodebin2 ! " "glupload ! gleffects effect=5 ! " "fakesink sync=1").arg(m_videoLocation).toAscii(), NULL)); } m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline)); gst_bus_add_watch(m_bus, (GstBusFunc) bus_call, this); gst_object_unref(m_bus); /* Retrieve the last gl element */ GstElement *gl_element = gst_bin_get_by_name(GST_BIN(m_pipeline), "gleffects0"); if(!gl_element) { qDebug ("gl element could not be found"); return; } g_object_set(G_OBJECT (gl_element), "external-opengl-context", this->glctx.contextId, NULL); g_object_unref(gl_element); gst_element_set_state(GST_ELEMENT(this->m_pipeline), GST_STATE_PAUSED); GstState state = GST_STATE_PAUSED; if(gst_element_get_state(GST_ELEMENT(this->m_pipeline), &state, NULL, GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) { qDebug("failed to pause pipeline"); return; } }
static void test_initialized_with_audio_caps (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); fail_unless (e->caps != NULL, NULL); gint i, cs = gst_caps_get_size (e->caps); fail_unless (cs > 0, NULL); for (i = 0; i < cs; i++) { fail_unless (gst_structure_has_name (gst_caps_get_structure (e->caps, i), "audio/x-raw"), NULL); } GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
int main (int argc, char **argv) { GstElement *pipeline; GstState state; GError *error = NULL; gst_init (&argc, &argv); pipeline = gst_parse_launch ("ximagesrc ! fakesink", &error); if (error) { g_print ("Error while parsing pipeline description: %s\n", error->message); return -1; } loop = g_main_loop_new (NULL, FALSE); gst_element_set_state (pipeline, GST_STATE_PLAYING); /* lets check it gets to PLAYING */ if (gst_element_get_state (pipeline, &state, NULL, GST_CLOCK_TIME_NONE) == GST_STATE_CHANGE_FAILURE || state != GST_STATE_PLAYING) { g_warning ("State change to playing failed"); } /* We want to get out after 5 seconds */ g_timeout_add (5000, (GSourceFunc) terminate_playback, pipeline); g_main_loop_run (loop); g_main_loop_unref (loop); return 0; }
/* Run a one-buffer videotestsrc pipeline constrained to @caps and return
 * the single buffer pulled from the appsink.  The caller owns the returned
 * buffer. */
static GstBuffer *
create_video_buffer (GstCaps * caps)
{
  GstElement *pipe, *capsfilter, *appsink;
  GstBuffer *buf;

  pipe = gst_parse_launch
      ("videotestsrc num-buffers=1 ! capsfilter name=cf ! appsink name=sink",
      NULL);
  g_assert (pipe != NULL);

  capsfilter = gst_bin_get_by_name (GST_BIN (pipe), "cf");
  appsink = gst_bin_get_by_name (GST_BIN (pipe), "sink");

  /* constrain the source output to the requested format */
  g_object_set (G_OBJECT (capsfilter), "caps", caps, NULL);

  gst_element_set_state (pipe, GST_STATE_PLAYING);
  buf = gst_app_sink_pull_buffer (GST_APP_SINK (appsink));
  gst_element_set_state (pipe, GST_STATE_NULL);

  gst_object_unref (capsfilter);
  gst_object_unref (appsink);
  gst_object_unref (pipe);

  return buf;
}
/* Create an element from @element_name via gst_parse_launch and assign it
 * to @property_name on @element.  Returns FALSE if the element could not
 * be created; optionally hands the created element back via @res_elem
 * (no extra reference is taken). */
static gboolean
setup_pipeline_element (GstElement * element, const gchar * property_name,
    const gchar * element_name, GstElement ** res_elem)
{
  gboolean res = TRUE;
  GstElement *elem = NULL;

  if (element_name) {
    GError *error = NULL;

    elem = gst_parse_launch (element_name, &error);
    if (elem) {
      g_object_set (element, property_name, elem, NULL);
      /* fix: gst_parse_launch can return an element together with a
       * recoverable-warning GError, which was previously leaked */
      if (error) {
        GST_WARNING ("recoverable error for '%s': %s", element_name,
            error->message);
        g_error_free (error);
      }
    } else {
      GST_WARNING ("can't create element '%s' for property '%s'", element_name,
          property_name);
      if (error) {
        GST_ERROR ("%s", error->message);
        g_error_free (error);
      }
      res = FALSE;
    }
  } else {
    GST_DEBUG ("no element for property '%s' given", property_name);
  }
  if (res_elem)
    *res_elem = elem;
  return res;
}
void test_parse_launch_errors() { GstElement *pipe; GError *err; const gchar *arr[] = { "fakesrc", "fakesink", NULL }; std_log(LOG_FILENAME_LINE, "Test started test_parse_launch_errors"); err = NULL; pipe = gst_parse_launch ("fakesrc ! fakesink", &err); fail_unless (err != NULL, "expected an error, but did not get one"); fail_unless (pipe == NULL, "got pipeline, but expected NULL"); fail_unless (err->domain == GST_CORE_ERROR); fail_unless (err->code == GST_CORE_ERROR_DISABLED); g_error_free (err); err = NULL; pipe = gst_parse_bin_from_description ("fakesrc ! fakesink", TRUE, &err); fail_unless (err != NULL, "expected an error, but did not get one"); fail_unless (pipe == NULL, "got pipeline, but expected NULL"); fail_unless (err->domain == GST_CORE_ERROR); fail_unless (err->code == GST_CORE_ERROR_DISABLED); g_error_free (err); err = NULL; pipe = gst_parse_launchv (arr, &err); fail_unless (err != NULL, "expected an error, but did not get one"); fail_unless (pipe == NULL, "got pipeline, but expected NULL"); fail_unless (err->domain == GST_CORE_ERROR); fail_unless (err->code == GST_CORE_ERROR_DISABLED); g_error_free (err); std_log(LOG_FILENAME_LINE, "Test Successful"); create_xml(0); }
/* Stolen from gst-plugins-good/ext/gconf/gconf.c */ static GstElement * my_gst_gconf_render_bin_from_description (const gchar * description) { GstElement *bin = NULL; GstPad *pad = NULL; GError *error = NULL; gchar *desc = NULL; /* parse the pipeline to a bin */ desc = g_strdup_printf ("bin.( %s )", description); bin = GST_ELEMENT (gst_parse_launch (desc, &error)); g_free (desc); if (error) { GST_ERROR ("gstgconf: error parsing pipeline %s\n%s\n", description, error->message); g_error_free (error); return NULL; } /* find pads and ghost them if necessary */ if ((pad = my_gst_bin_find_unconnected_pad (GST_BIN (bin), GST_PAD_SRC))) { gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad)); } if ((pad = my_gst_bin_find_unconnected_pad (GST_BIN (bin), GST_PAD_SINK))) { gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); } return bin; }
int main(int argc, char *argv[]) { GstElement *pipeline; GstBus *bus; GstMessage *msg; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Build the pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
// Build a custom pipeline from a launch string; the pipeline is expected
// to contain an element named "sink".  Returns false on parse error.
bool ofGstUtils::setPipelineWithSink(string pipeline){
	bHavePixelsChanged = false;
	bIsCustomWithSink = true;

	gstData.loop = g_main_loop_new (NULL, FALSE);

	gchar* pipeline_string = g_strdup(pipeline.c_str());
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);

	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);
	g_free(pipeline_string); // fix: string was leaked

	if(error!=NULL){
		ofLog(OF_LOG_ERROR,"couldnt create pipeline: " + string(error->message));
		g_error_free(error); // fix: error was leaked
		return false;
	}

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");
	if(gstSink){
		gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
	}else{
		// fix: was dereferencing NULL when no "sink" element exists
		ofLog(OF_LOG_ERROR,"no element named \"sink\" found in pipeline");
	}

	return startPipeline();
}
/* Main method for the native code. This is executed on its own thread. */ static void *app_function (void *userdata) { JavaVMAttachArgs args; GstBus *bus; CustomData *data = (CustomData *)userdata; GSource *bus_source; GError *error = NULL; GST_DEBUG ("Creating pipeline in CustomData at %p", data); /* Create our own GLib Main Context and make it the default one */ data->context = g_main_context_new (); g_main_context_push_thread_default(data->context); /* Build pipeline */ data->pipeline = gst_parse_launch("videotestsrc ! warptv ! videoconvert ! autovideosink", &error); if (error) { gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message); g_clear_error (&error); set_ui_message(message, data); g_free (message); return NULL; } /* Set the pipeline to READY, so it can already accept a window handle, if we have one */ gst_element_set_state(data->pipeline, GST_STATE_READY); data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY); if (!data->video_sink) { GST_ERROR ("Could not retrieve video sink"); return NULL; } /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ bus = gst_element_get_bus (data->pipeline); bus_source = gst_bus_create_watch (bus); g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL); g_source_attach (bus_source, data->context); g_source_unref (bus_source); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data); g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data); gst_object_unref (bus); /* Create a GLib Main Loop and set it to run */ GST_DEBUG ("Entering main loop... 
(CustomData:%p)", data); data->main_loop = g_main_loop_new (data->context, FALSE); check_initialization_complete (data); g_main_loop_run (data->main_loop); GST_DEBUG ("Exited main loop"); g_main_loop_unref (data->main_loop); data->main_loop = NULL; /* Free resources */ g_main_context_pop_thread_default(data->context); g_main_context_unref (data->context); gst_element_set_state (data->pipeline, GST_STATE_NULL); gst_object_unref (data->video_sink); gst_object_unref (data->pipeline); return NULL; }
/* Build the playbin-based test pipeline with fake sinks; validates the
 * "valid-pipeline" checklist item and returns NULL on any parse problem. */
static GstPipeline *
hls_test_create_pipeline (InsanityGstPipelineTest * ptest, gpointer userdata)
{
  GstElement *pipeline = NULL;
  const char *launch_line =
      "playbin audio-sink=\"fakesink name=asink\" video-sink=\"fakesink name=vsink\"";
  GError *error = NULL;

  pipeline = gst_parse_launch (launch_line, &error);
  if (!pipeline) {
    insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
        "valid-pipeline", FALSE, error ? error->message : NULL);
    if (error)
      g_error_free (error);
    return NULL;
  } else if (error) {
    /* fix: gst_parse_launch returned an element together with an error;
     * drop our reference instead of leaking it (gst-launch.c does not
     * unref, but we should) */
    gst_object_unref (pipeline);
    pipeline = NULL;
    insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
        "valid-pipeline", FALSE, error->message);
    g_error_free (error);
    return NULL;
  }

  g_signal_connect (pipeline, "source-setup", G_CALLBACK (source_setup_cb),
      NULL);

  glob_pipeline = pipeline;

  return GST_PIPELINE (pipeline);
}
static void test_audio_context_configures_buffer_size (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_READY); gst_element_set_context (p, ctx); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf = get_buffer_info (e, 0); // sizeof(gint16) * (int)(0.5 + (44100 * (60.0 / 8)) / (120 * 4)) ck_assert_uint_eq (bf->size, 1378); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (bus); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
/* Build a pipeline from the "pipeline-launch-line" test argument and
 * validate the "valid-pipeline" checklist item; returns NULL on failure. */
static GstPipeline *
blank_gst_test_create_pipeline (InsanityGstPipelineTest * ptest,
    gpointer userdata)
{
  GstPipeline *pipeline = NULL;
  GValue launch_line = { 0 };
  GError *error = NULL;

  if (insanity_test_get_argument (INSANITY_TEST (ptest),
          "pipeline-launch-line", &launch_line)) {
    pipeline =
        GST_PIPELINE (gst_parse_launch (g_value_get_string (&launch_line),
            &error));
    g_value_unset (&launch_line);
    if (!pipeline) {
      insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
          "valid-pipeline", FALSE, error ? error->message : NULL);
      if (error)
        g_error_free (error);
    } else if (error) {
      /* fix: an element *was* returned alongside the error; unref it
       * instead of leaking (gst-launch.c does not unref, but we should) */
      gst_object_unref (pipeline);
      pipeline = NULL;
      insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
          "valid-pipeline", FALSE, error->message);
      g_error_free (error);
    }
  }

  return pipeline;
}
void MyGstreamer::start() { qDebug("Gstreamer Started..."); GstElement *pipeline; GstBus *bus; GstMessage *msg; /* Initialize GStreamer */ gst_init (0, 0); /* Build the pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* Wait until error or EOS */ bus = gst_element_get_bus (pipeline); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); /* Free resources */ if (msg != NULL) gst_message_unref (msg); gst_object_unref (bus); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); }
/* JNI entry point: build a pipeline from the Java launch string, play it
 * until EOS or error, then tear everything down.  Returns 0 on success,
 * -1 on failure. */
JNIEXPORT jint JNICALL Java_gstJNI_pipelineLaunch
  (JNIEnv *env, jobject thisObj, jstring launch) {
   //First, we need to convert the JNI string to a char*
   const char *inCStr = (*env)->GetStringUTFChars(env, launch, NULL);
   if(inCStr == NULL) return -1; //error check

   GstElement *pipeline;
   GstBus *bus;
   GstMessage *msg;
   GError *e = NULL;

   //Initialize GStreamer
   gst_init (0, NULL);

   pipeline = gst_parse_launch(inCStr, &e);
   /* fix: the parse error was never checked — a bad launch string led to
    * a NULL-pointer dereference below and leaked the GError and JNI string */
   if (e != NULL || pipeline == NULL) {
     g_printerr ("Failed to build pipeline: %s\n",
         e ? e->message : "unknown error");
     if (e) g_error_free (e);
     if (pipeline) gst_object_unref (pipeline);
     (*env)->ReleaseStringUTFChars(env, launch, inCStr);
     return -1;
   }

   /* Start playing */
   gst_element_set_state (pipeline, GST_STATE_PLAYING);

   /* Wait until error or EOS */
   bus = gst_element_get_bus (pipeline);
   msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
       GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

   /* Free resources */
   if (msg != NULL)
     gst_message_unref (msg);
   gst_object_unref (bus);
   gst_element_set_state (pipeline, GST_STATE_NULL);
   gst_object_unref (pipeline);

   (*env)->ReleaseStringUTFChars(env, launch, inCStr);
   return 0;
}
int main(int argc, char *argv[]) { CustomData data; GstBus *bus; /* Initialize cumstom data structure */ memset (&data, 0, sizeof (data)); data.b = 1; /* For waveform generation */ data.d = 1; /* Initialize GStreamer */ gst_init (&argc, &argv); /* Create the playbin2 element */ data.pipeline = gst_parse_launch ("playbin2 uri=appsrc://", NULL); g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data); /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ bus = gst_element_get_bus (data.pipeline); gst_bus_add_signal_watch (bus); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data); gst_object_unref (bus); /* Start playing the pipeline */ gst_element_set_state (data.pipeline, GST_STATE_PLAYING); /* Create a GLib Main Loop and set it to run */ data.main_loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (data.main_loop); /* Free resources */ gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_object_unref (data.pipeline); return 0; }
static void test_reset_on_seek (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PAUSED); gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE); gst_element_seek (p, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, GST_MSECOND * 100, GST_SEEK_TYPE_SET, GST_MSECOND * 200); gst_element_set_state (p, GST_STATE_PLAYING); gst_element_get_state (p, NULL, NULL, GST_CLOCK_TIME_NONE); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); ck_assert_int_eq (e->num_disconts, 1); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
static void test_position_query_time (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=1 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf = get_buffer_info (e, 0); gint64 pos; gboolean res = gst_element_query_position ((GstElement *) e, GST_FORMAT_TIME, &pos); fail_unless (res, NULL); ck_assert_uint64_eq (bf->duration, pos); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
static void test_buffers_are_contigous (BT_TEST_ARGS) { BT_TEST_START; GST_INFO ("-- arrange --"); GstElement *p = gst_parse_launch ("buzztrax-test-audio-synth name=\"src\" num-buffers=2 ! fakesink async=false", NULL); BtTestAudioSynth *e = (BtTestAudioSynth *) gst_bin_get_by_name (GST_BIN (p), "src"); GstBus *bus = gst_element_get_bus (p); GST_INFO ("-- act --"); gst_element_set_state (p, GST_STATE_PLAYING); gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, GST_CLOCK_TIME_NONE); GST_INFO ("-- assert --"); BufferFields *bf0 = get_buffer_info (e, 0); BufferFields *bf1 = get_buffer_info (e, 1); ck_assert_uint64_eq (bf1->ts, bf0->ts + bf0->duration); ck_assert_uint64_eq (bf1->offset, bf0->offset + bf0->offset_end); GST_INFO ("-- cleanup --"); gst_element_set_state (p, GST_STATE_NULL); gst_object_unref (e); gst_object_unref (p); BT_TEST_END; }
/* Build the user-supplied pipeline and wire up the fakesink named "vsink"
 * with handoff signals and pad probes for event/query inspection.
 * Returns FALSE if the pipeline cannot be built or has no "vsink". */
static gboolean
init_parse_launch_player (APP_STATE_T * state, const gchar * spipeline)
{
  GstElement *vsink;
  GstPad *pad;
  GError *error = NULL;

  /* ex:

     ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
     h264parse ! omxh264dec ! glcolorscale ! fakesink name=vsink"

     ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
     h264parse ! omxh264dec ! glcolorscale ! \
     video/x-raw(memory:EGLImage) ! fakesink name=vsink"

     ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
     h264parse ! omxh264dec ! glcolorscale ! \
     video/x-raw(memory:GLMemory) ! fakesink name=vsink"

     ./testegl "filesrc location=big_buck_bunny_720p_h264.mov ! qtdemux ! \
     h264parse ! omxh264dec ! glcolorscale ! \
     video/x-raw(meta:GstVideoGLTextureUploadMeta) ! \
     fakesink name=vsink"
   */

  /* pipeline 1 and 2 are the same and the most efficient as glcolorscale
   * will enter in passthrough mode and testegl will just bind the eglimage
   * to a gl texture without any copy. */

  state->pipeline = gst_parse_launch (spipeline, &error);

  if (!state->pipeline) {
    g_printerr ("Unable to instatiate pipeline '%s': %s\n",
        spipeline, error->message);
    g_error_free (error);       /* fix: error was leaked */
    return FALSE;
  }
  if (error)
    g_error_free (error);       /* fix: recoverable-warning error leaked */

  vsink = gst_bin_get_by_name (GST_BIN (state->pipeline), "vsink");

  if (!vsink) {
    g_printerr ("Unable to find a fakesink named 'vsink'");
    return FALSE;
  }

  g_object_set (vsink, "sync", TRUE, "silent", TRUE, "qos", TRUE,
      "enable-last-sample", FALSE,
      "max-lateness", 20 * GST_MSECOND, "signal-handoffs", TRUE, NULL);

  g_signal_connect (vsink, "preroll-handoff", G_CALLBACK (preroll_cb), state);
  g_signal_connect (vsink, "handoff", G_CALLBACK (buffers_cb), state);

  /* fix: static pads returned by gst_element_get_static_pad were leaked */
  pad = gst_element_get_static_pad (vsink, "sink");
  gst_pad_add_probe (pad,
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, events_cb, state, NULL);
  gst_object_unref (pad);

  pad = gst_element_get_static_pad (vsink, "sink");
  gst_pad_add_probe (pad,
      GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, query_cb, state, NULL);
  gst_object_unref (pad);

  /* fix: gst_bin_get_by_name already returned a ref; taking another one
   * with gst_object_ref leaked it.  Transfer the existing ref instead. */
  state->vsink = vsink;

  return TRUE;
}
/* Probe whether the profile's encoding pipeline can be instantiated by
 * parsing "fakesrc ! <profile pipeline>".  Returns TRUE when an element
 * was created (warnings are logged but tolerated). */
gboolean
nsc_gstreamer_supports_profile (GMAudioProfile *profile)
{
	GError   *err = NULL;
	gchar    *desc;
	GstElement *elem;
	gboolean  supported;

	desc = g_strdup_printf ("fakesrc ! %s",
				gm_audio_profile_get_pipeline (profile));
	elem = gst_parse_launch (desc, &err);
	g_free (desc);

	/*
	 * It is possible for both element and error to be non NULL,
	 * so let's check both.
	 */
	supported = (elem != NULL);
	if (elem)
		gst_object_unref (GST_OBJECT (elem));

	if (err) {
		if (supported)
			g_warning ("Profile warning; %s", err->message);
		else
			g_warning ("Profile error: %s", err->message);
		g_error_free (err);
	}

	return supported;
}
// Open @filename with uridecodebin ! fakesink, pre-roll to PAUSED and
// spin a main loop so bus messages can be processed (on_bus_message is
// expected to quit the loop once the info has been gathered).
MediaInfo::MediaInfo(const std::string& filename) :
  m_mainloop(),
  m_pipeline(),
  m_playbin(),
  m_fakesink(),
  m_duration(),
  m_frames(),
  m_buffers(),
  m_bytes(),
  m_width(),
  m_height()
{
  m_playbin = gst_parse_launch("uridecodebin name=mysource ! fakesink name=mysink", NULL);
  m_pipeline = GST_PIPELINE(m_playbin);

  GstElement* source = gst_bin_get_by_name(GST_BIN(m_pipeline), "mysource");
  gchar* uri = g_filename_to_uri(filename.c_str(), NULL, NULL);
  g_object_set(source, "uri", uri, NULL);
  g_free(uri);
  gst_object_unref(source); // fix: ref from gst_bin_get_by_name was leaked

  m_fakesink = gst_bin_get_by_name(GST_BIN(m_pipeline), "mysink");

  GstBus* bus = gst_pipeline_get_bus(m_pipeline);
  gst_bus_add_signal_watch(bus);
  //bus->signal_message().connect(sigc::mem_fun(this, &MediaInfo::on_bus_message));
  gst_object_unref(bus); // fix: the watch holds its own ref; ours leaked

  gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_PAUSED);

  m_mainloop = g_main_loop_new(NULL, FALSE);
  g_main_loop_run(m_mainloop);
}
// Build a custom pipeline from a launch string and look up @sinkname in it,
// then delegate to the element-based overload.  Returns false on parse error.
bool ofGstUtils::setPipelineWithSink(string pipeline, string sinkname, bool isStream){
	ofGstUtils::startGstMainLoop();

	gchar* pipeline_string = g_strdup((pipeline).c_str());
	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);

	ofLogNotice("ofGstUtils") << "setPipelineWithSink(): gstreamer pipeline: " << pipeline_string;
	g_free(pipeline_string); // fix: string was leaked

	if(error!=NULL){
		ofLogError("ofGstUtils") << "setPipelineWithSink(): couldn't create pipeline: " << error->message;
		g_error_free(error); // fix: error was leaked
		return false;
	}

	if(sinkname!=""){
		gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),sinkname.c_str());

		if(!gstSink){
			ofLogError("ofGstUtils") << "setPipelineWithSink(): couldn't get sink from string pipeline";
		}
	}

	return setPipelineWithSink(gstPipeline,gstSink,isStream);
}
// Probe a capture device: spin up "<src> name=source device=<dev> ! fakesink",
// wait up to 10 s for PLAYING, then read the source pad caps to enumerate the
// formats the device supports.
static void get_device_data (ofGstDevice &webcam_device)
{
    char                *pipeline_desc;
    GstElement          *pipeline;
    GError              *err;
    GstStateChangeReturn ret;
    GstMessage          *msg;
    GstBus              *bus;

    {
        pipeline_desc = g_strdup_printf ("%s name=source device=%s ! fakesink",
                                         webcam_device.gstreamer_src,
                                         webcam_device.video_device);
        err = NULL;
        pipeline = gst_parse_launch (pipeline_desc, &err);
        if ((pipeline != NULL) && (err == NULL)) {
            /* Start the pipeline and wait for max. 10 seconds for it to start up */
            gst_element_set_state (pipeline, GST_STATE_PLAYING);
            ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

            /* Check if any error messages were posted on the bus */
            bus = gst_element_get_bus (pipeline);
            msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
            gst_object_unref (bus);

            if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS)) {
                GstElement *src;
                GstPad     *pad;
                char       *name;
                GstCaps    *caps;

                gst_element_set_state (pipeline, GST_STATE_PAUSED);

                src = gst_bin_get_by_name (GST_BIN (pipeline), "source");

                g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);
                // ofLog(OF_LOG_VERBOSE,"Device: %s (%s)\n", name==NULL?"Unknown":name, webcam_device.video_device);
                g_free (name); // fix: allocated device-name string was leaked
                               //      (was also clobbered with a string literal)

                pad  = gst_element_get_pad (src, "src");
                caps = gst_pad_get_caps (pad);
                gst_object_unref (pad);
                gst_object_unref (src); // fix: ref from gst_bin_get_by_name leaked

                get_supported_video_formats (webcam_device, *caps);

                gst_caps_unref (caps);
            } else if (msg != NULL) {
                gst_message_unref (msg); // fix: error message was leaked
            }
            gst_element_set_state (pipeline, GST_STATE_NULL);
            gst_object_unref (pipeline);
        }
        if (err)
            g_error_free (err);

        g_free (pipeline_desc);
    }
}
// Probe a capture device (framerate-aware overload): run the device through
// a fakesink pipeline, wait for PLAYING, and enumerate supported formats
// from the source pad caps.
static void get_device_data (ofGstDevice &webcam_device, int desired_framerate)
{
	string pipeline_desc = webcam_device.gstreamer_src + " name=source device=" +
			 webcam_device.video_device + " ! fakesink";

	GError * err = NULL;
	GstElement * pipeline = gst_parse_launch (pipeline_desc.c_str(), &err);
	if ((pipeline == NULL) || (err != NULL)){
		if (err){
			ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data: %s", err->message);
			g_error_free (err);
		}else{
			ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data, cannot get pipeline");
		}
		if(pipeline)
			gst_object_unref (pipeline);
		return;
	}

	// TODO: try to lower seconds,
	// Start the pipeline and wait for max. 10 seconds for it to start up
	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	GstStateChangeReturn ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

	// Check if any error messages were posted on the bus
	GstBus * bus = gst_element_get_bus (pipeline);
	GstMessage * msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
	gst_object_unref (bus);

	if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS)){
		gst_element_set_state (pipeline, GST_STATE_PAUSED);

		GstElement *src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
		char       *name;
		g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);

		ofLog(OF_LOG_VERBOSE, "Device: %s (%s)\n", name==NULL?"":name, webcam_device.video_device.c_str());
		g_free (name); // fix: allocated device-name string was leaked

		GstPad     *pad  = gst_element_get_pad (src, "src");
		GstCaps    *caps = gst_pad_get_caps (pad);
		gst_object_unref (pad);
		gst_object_unref (src); // fix: ref from gst_bin_get_by_name was leaked

		get_supported_video_formats (webcam_device, *caps, desired_framerate);

		gst_caps_unref (caps);
	}else if(msg){
		gchar *debug;
		gst_message_parse_error(msg, &err, &debug);

		// fix: gst_element_get_name returns an allocated copy — free it
		gchar *src_name = gst_element_get_name(GST_MESSAGE_SRC (msg));
		ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data; module %s reported: %s",
			  src_name, err->message);
		g_free(src_name);

		g_error_free(err);
		g_free(debug);
		gst_message_unref(msg); // fix: error message was leaked
	}
	gst_element_set_state (pipeline, GST_STATE_NULL);
	gst_object_unref (pipeline);
}
int main(int argc, char *argv[]) { GstElement *pipeline, *sink; ClutterTimeline *timeline; ClutterActor *stage, *texture; /* clutter-gst takes care of initializing Clutter and GStreamer */ if (clutter_gst_init (&argc, &argv) != CLUTTER_INIT_SUCCESS) { g_error ("Failed to initialize clutter\n"); return -1; } stage = clutter_stage_get_default (); /* Make a timeline */ timeline = clutter_timeline_new (1000); g_object_set(timeline, "loop", TRUE, NULL); /* Create new texture and disable slicing so the video is properly mapped onto it */ texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL)); g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL); /* Build the GStreamer pipeline */ pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL); /* Instantiate the Clutter sink */ sink = gst_element_factory_make ("autocluttersink", NULL); if (sink == NULL) { /* Revert to the older cluttersink, in case autocluttersink was not found */ sink = gst_element_factory_make ("cluttersink", NULL); } if (sink == NULL) { g_printerr ("Unable to find a Clutter sink.\n"); return -1; } /* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/ g_object_set (sink, "texture", texture, NULL); /* Add the Clutter sink to the pipeline */ g_object_set (pipeline, "video-sink", sink, NULL); /* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING); /* start the timeline */ clutter_timeline_start (timeline); /* Add texture to the stage, and show it */ clutter_group_add (CLUTTER_GROUP (stage), texture); clutter_actor_show_all (stage); clutter_main(); /* Free resources */ gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); return 0; }
/* Parse @pipe_descr into a pipeline; returns NULL (after warning) if the
 * result is not a GstPipeline. */
static GstElement *
setup_pipeline (const gchar * pipe_descr)
{
  GstElement *result = gst_parse_launch (pipe_descr, NULL);

  g_return_val_if_fail (GST_IS_PIPELINE (result), NULL);
  return result;
}
/* Parse @pipe_descr into a pipeline; the test fails outright if the result
 * is not a GstPipeline. */
static GstElement *
setup_pipeline (const gchar * pipe_descr)
{
  GstElement *result = gst_parse_launch (pipe_descr, NULL);

  fail_unless (GST_IS_PIPELINE (result));
  return result;
}
/* Build the pipeline */
/* Assemble and parse the full test pipeline into the global
 * gst_test_pipeline.  The sink-under-test half comes either from GConf
 * ("default/audiosink" / "default/videosink") or from the description's
 * own test_pipe string; an audioconvert/resample or ffmpegcolorspace
 * shim is inserted between the test half and the element under test.
 * Returns TRUE on success; parse errors are reported via @p_err.
 * NOTE(review): side effect — sets the file-global gst_test_pipeline. */
static gboolean
build_test_pipeline (GSTPPipelineDescription * pipeline_desc, GError ** p_err)
{
  const gchar *in_between = NULL;
  gboolean return_val = FALSE;
  gchar *test_pipeline_str = NULL;
  gchar *full_pipeline_str = NULL;

  g_assert (p_err != NULL);

  /* pick the "test side" of the pipeline: a configured default sink or a
   * caller-supplied pipe string */
  switch (pipeline_desc->test_type) {
    case TEST_PIPE_AUDIOSINK:
      test_pipeline_str = gst_properties_gconf_get_string ("default/audiosink");
      break;
    case TEST_PIPE_VIDEOSINK:
      test_pipeline_str = gst_properties_gconf_get_string ("default/videosink");
      break;
    case TEST_PIPE_SUPPLIED:
      test_pipeline_str = g_strdup (pipeline_desc->test_pipe);
      break;
  }

  /* choose the converter shim matching the media type */
  switch (pipeline_desc->type) {
    case PIPE_TYPE_AUDIOSINK:
    case PIPE_TYPE_AUDIOSRC:
      in_between = "audioconvert ! audioresample";
      break;
    default:
      in_between = "ffmpegcolorspace";
      break;
  }

  /* sinks: test half feeds the element; sources: element feeds test half */
  switch (pipeline_desc->type) {
    case PIPE_TYPE_AUDIOSINK:
    case PIPE_TYPE_VIDEOSINK:
      full_pipeline_str = g_strdup_printf ("%s ! %s ! %s", test_pipeline_str,
          in_between, gst_pipeline_string_from_desc (pipeline_desc));
      break;
    case PIPE_TYPE_AUDIOSRC:
    case PIPE_TYPE_VIDEOSRC:
      full_pipeline_str = g_strdup_printf ("%s ! %s ! %s",
          gst_pipeline_string_from_desc (pipeline_desc), in_between,
          test_pipeline_str);
      break;
  }

  if (full_pipeline_str) {
    gst_test_pipeline = gst_parse_launch (full_pipeline_str, p_err);
    /* only succeed when parsing produced a pipeline with no error at all */
    if (*p_err == NULL && gst_test_pipeline != NULL)
      return_val = TRUE;
  }

  g_free (test_pipeline_str);
  g_free (full_pipeline_str);

  return return_val;
}
void Pipeline::configure () { #ifdef Q_WS_WIN m_loop = g_main_loop_new (NULL, FALSE); #endif if (m_videoLocation.isEmpty ()) { qDebug ("No video file specified. Using video test source."); m_pipeline = GST_PIPELINE (gst_parse_launch ("videotestsrc ! " "video/x-raw, width=640, height=480, " "framerate=(fraction)30/1 ! " "glupload ! gleffects effect=5 ! fakesink sync=1", NULL)); } else { QByteArray ba = m_videoLocation.toLocal8Bit (); qDebug ("Loading video: %s", ba.data ()); gchar *pipeline = g_strdup_printf ("filesrc name=f ! " "decodebin ! gleffects effect=5 ! " "fakesink sync=1"); m_pipeline = GST_PIPELINE (gst_parse_launch (pipeline, NULL)); GstElement *f = gst_bin_get_by_name (GST_BIN (m_pipeline), "f"); g_object_set (G_OBJECT (f), "location", ba.data (), NULL); gst_object_unref (GST_OBJECT (f)); g_free (pipeline); } m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline)); gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this); gst_bus_enable_sync_message_emission (m_bus); g_signal_connect (m_bus, "sync-message", G_CALLBACK (sync_bus_call), this); gst_object_unref (m_bus); gst_element_set_state (GST_ELEMENT (this->m_pipeline), GST_STATE_PAUSED); GstState state = GST_STATE_PAUSED; if (gst_element_get_state (GST_ELEMENT (this->m_pipeline), &state, NULL, GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) { qDebug ("failed to pause pipeline"); return; } }