static gboolean gst_file_sink_set_location (GstFileSink * sink, const gchar * location, GError ** error) { if (sink->file) goto was_open; g_free (sink->filename); g_free (sink->uri); if (location != NULL) { /* we store the filename as we received it from the application. On Windows * this should be in UTF8 */ sink->filename = g_strdup (location); sink->uri = gst_filename_to_uri (location, NULL); GST_INFO ("filename : %s", sink->filename); GST_INFO ("uri : %s", sink->uri); } else { sink->filename = NULL; sink->uri = NULL; } return TRUE; /* ERRORS */ was_open: { g_warning ("Changing the `location' property on filesink when a file is " "open is not supported."); g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_STATE, "Changing the 'location' property on filesink when a file is " "open is not supported"); return FALSE; } }
/* Append @filename to @playlist as one or more URIs.
 *
 * Valid URIs are added verbatim.  Directories are recursed into with
 * their entries sorted (resolves the old FIXME: unsorted g_dir_read_name()
 * order made the playlist nondeterministic).  Plain filenames are
 * converted to file:// URIs.
 */
static void
add_to_playlist (GPtrArray * playlist, const gchar * filename)
{
  GDir *dir;
  gchar *uri;

  /* already a URI: add as-is */
  if (gst_uri_is_valid (filename)) {
    g_ptr_array_add (playlist, g_strdup (filename));
    return;
  }

  /* directory: recurse into its (sorted) entries */
  if ((dir = g_dir_open (filename, 0, NULL))) {
    const gchar *entry;
    GList *l, *files = NULL;

    /* collect first, sorted, so the playlist order is deterministic */
    while ((entry = g_dir_read_name (dir))) {
      gchar *path;

      path = g_build_filename (filename, entry, NULL);
      files = g_list_insert_sorted (files, path, (GCompareFunc) g_strcmp0);
    }
    g_dir_close (dir);

    for (l = files; l != NULL; l = l->next) {
      gchar *path = (gchar *) l->data;

      add_to_playlist (playlist, path);
      g_free (path);
    }
    g_list_free (files);
    return;
  }

  /* plain filename: convert to a file:// URI */
  uri = gst_filename_to_uri (filename, NULL);
  if (uri != NULL)
    g_ptr_array_add (playlist, uri);
  else
    g_warning ("Could not make URI out of filename '%s'", filename);
}
/* Return a newly-allocated URI for @location: pass it through when it is
 * already a valid URI, otherwise treat it as a local filename.
 * Caller owns the returned string. */
static gchar *
ensure_uri (gchar * location)
{
  return gst_uri_is_valid (location) ?
      g_strdup (location) : gst_filename_to_uri (location, NULL);
}
/* Discover the subtitle file given by the "sublocation" argument and
 * start writing an XML media descriptor for it.
 *
 * Fix: the original passed NULL as the GError out-param to
 * gst_discoverer_new() but then dereferenced err->message on failure —
 * a guaranteed NULL dereference.  We now request the error and guard
 * err before using its message everywhere.
 */
static void
generate_xml_media_descriptor (InsanityTest * test)
{
  GError *err = NULL;
  GstDiscovererInfo *info = NULL;
  gchar *sublocation = NULL, *suburi = NULL;
  GstDiscoverer *discoverer;

  /* request the error so the failure path can actually report it */
  discoverer = gst_discoverer_new (5 * GST_SECOND, &err);

  insanity_test_get_string_argument (test, "sublocation", &sublocation);

  if (G_UNLIKELY (discoverer == NULL)) {
    ERROR (test, "Error creating discoverer: %s\n",
        err != NULL ? err->message : "unknown error");
    g_clear_error (&err);
    insanity_test_done (test);
    goto done;
  }

  suburi = gst_filename_to_uri (sublocation, &err);
  if (err) {
    ERROR (test, "Could not construct filename");
    g_clear_error (&err);
    goto done;
  }

  info = gst_discoverer_discover_uri (discoverer, suburi, &err);
  if (info == NULL) {
    /* guard err: discover_uri should set it, but don't crash if not */
    ERROR (test, "Error discovering: %s\n",
        err != NULL ? err->message : "unknown error");
    g_clear_error (&err);
    insanity_test_done (test);
    goto done;
  }

  glob_duration = gst_discoverer_info_get_duration (info);
  glob_seekable = gst_discoverer_info_get_seekable (info);

  glob_writer = media_descriptor_writer_new (test,
      sublocation, glob_duration, glob_seekable);

  glob_in_progress = TEST_SUBTTILE_DESCRIPTOR_GENERATION;
  g_idle_add ((GSourceFunc) idle_restart_pipeline, NULL);

  media_descriptor_writer_add_stream (glob_writer,
      glob_suboverlay_src_probe->pad);

done:
  if (discoverer != NULL)
    g_object_unref (discoverer);
  if (info != NULL)
    gst_discoverer_info_unref (info);
  g_free (sublocation);
  g_free (suburi);
}
/* Entry point: converts argv[1] to a URI (unless it already contains a
 * scheme separator), creates a non-blocking AF_UNIX socketpair for IPC,
 * then forks: the parent runs the source pipeline, the child the sink
 * pipeline.  Both then enter the shared run loop.
 *
 * Exit codes: 1 = usage/URI/fork error, 2 = IPC setup error.
 */
gint
main (gint argc, gchar ** argv)
{
  GError *error = NULL;
  gchar *uri = NULL;
  pid_t pid;

  init (&argc, &argv);

  if (argc < 2) {
    fprintf (stderr, "usage: %s [av-filename-or-url]\n", argv[0]);
    return 1;
  }

  /* no "://" -> treat the argument as a local filename */
  if (!g_strstr_len (argv[1], -1, "://")) {
    uri = gst_filename_to_uri (argv[1], &error);
  } else {
    uri = g_strdup (argv[1]);
  }

  if (error) {
    fprintf (stderr, "usage: %s [av-filename-or-url]\n", argv[0]);
    g_clear_error (&error);
    return 1;
  }

  /* bidirectional channel between source and sink processes */
  if (socketpair (AF_UNIX, SOCK_STREAM, 0, pipes)) {
    fprintf (stderr, "Error creating pipes: %s\n", strerror (errno));
    return 2;
  }
  if (fcntl (pipes[0], F_SETFL, O_NONBLOCK) < 0 ||
      fcntl (pipes[1], F_SETFL, O_NONBLOCK) < 0) {
    fprintf (stderr, "Error setting O_NONBLOCK on pipes: %s\n",
        strerror (errno));
    return 2;
  }

  pid = fork ();
  if (pid < 0) {
    fprintf (stderr, "Error forking: %s\n", strerror (errno));
    return 1;
  } else if (pid > 0) {
    /* parent: source side; log to its own debug file (set before
     * gst_init so the GStreamer log machinery picks it up) */
    setenv ("GST_DEBUG_FILE", "gstsrc.log", 1);
    gst_init (&argc, &argv);
    start_source (uri);
  } else {
    /* child: sink side */
    setenv ("GST_DEBUG_FILE", "gstsink.log", 1);
    gst_init (&argc, &argv);
    start_sink ();
  }

  g_free (uri);

  /* both processes enter the run loop; pid tells run() which side it is */
  run (pid);

  return 0;
}
/* Build a file:// URI for a test file living under GES_TEST_FILES_PATH.
 * Returns a newly-allocated string owned by the caller (may be NULL if
 * the path cannot be converted). */
gchar *
ges_test_file_uri (const gchar * filename)
{
  gchar *full_path;
  gchar *result;

  full_path = g_build_filename (GES_TEST_FILES_PATH, filename, NULL);
  result = gst_filename_to_uri (full_path, NULL);
  g_free (full_path);

  return result;
}
/* Minimal playbin-based player: plays the file or URI given as argv[1]
 * until the bus watch quits the main loop.
 *
 * Fix: gst_filename_to_uri() can return NULL (e.g. unresolvable path);
 * the original handed that NULL straight to playbin's "uri" property.
 * We now bail out with a message instead.
 */
gint
main (gint argc, gchar * argv[])
{
  GstElement *playbin;
  GMainLoop *loop;
  GstBus *bus;
  guint bus_watch_id;
  gchar *uri;

  gst_init (&argc, &argv);

  if (argc < 2) {
    g_print ("usage: %s <media file or uri>\n", argv[0]);
    return 1;
  }

  playbin = gst_element_factory_make ("playbin", NULL);
  if (!playbin) {
    g_print ("'playbin' gstreamer plugin missing\n");
    return 1;
  }

  /* take the commandline argument and ensure that it is a uri */
  if (gst_uri_is_valid (argv[1]))
    uri = g_strdup (argv[1]);
  else
    uri = gst_filename_to_uri (argv[1], NULL);

  if (uri == NULL) {
    g_print ("could not create URI from '%s'\n", argv[1]);
    gst_object_unref (playbin);
    return 1;
  }

  g_object_set (playbin, "uri", uri, NULL);
  g_free (uri);

  /* create an event loop and feed gstreamer bus messages to it */
  loop = g_main_loop_new (NULL, FALSE);

  bus = gst_element_get_bus (playbin);
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  g_object_unref (bus);

  /* start play back and listed to events */
  gst_element_set_state (playbin, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* cleanup */
  gst_element_set_state (playbin, GST_STATE_NULL);
  g_object_unref (playbin);
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
/* Create a GESClip from a local file path with the given timing and
 * priority.  Caller owns the returned clip. */
GESClip *
make_source (char *path, guint64 start, guint64 duration, gint priority)
{
  gchar *uri;
  GESClip *clip;

  uri = gst_filename_to_uri (path, NULL);
  clip = GES_CLIP (ges_uri_clip_new (uri));
  g_free (uri);

  g_object_set (clip,
      "start", (guint64) start,
      "duration", (guint64) duration,
      "priority", (guint32) priority,
      "in-point", (guint64) 0,
      NULL);

  return clip;
}
/* Create a GESTimelineObject file source from a local path with the
 * given timing and priority.  Caller owns the returned object. */
GESTimelineObject *
make_source (char *path, guint64 start, guint64 duration, gint priority)
{
  gchar *uri;
  GESTimelineObject *obj;

  uri = gst_filename_to_uri (path, NULL);
  obj = GES_TIMELINE_OBJECT (ges_timeline_filesource_new (uri));
  g_free (uri);

  g_object_set (obj,
      "start", (guint64) start,
      "duration", (guint64) duration,
      "priority", (guint32) priority,
      "in-point", (guint64) 0,
      NULL);

  return obj;
}
/* Set a new location (filename) on the filesrc.
 *
 * Only allowed while the element is in READY or NULL state.  Stores both
 * the filename as received (expected UTF-8 on Windows) and the derived
 * file:// URI, then notifies the "location" property and the URI handler.
 *
 * Returns TRUE on success, FALSE if the element is already started.
 */
static gboolean
gst_file_src_set_location (GstFileSrc * src, const gchar * location)
{
  GstState state;

  /* the element must be stopped in order to do this */
  GST_OBJECT_LOCK (src);
  state = GST_STATE (src);
  if (state != GST_STATE_READY && state != GST_STATE_NULL)
    goto wrong_state;
  GST_OBJECT_UNLOCK (src);

  g_free (src->filename);
  g_free (src->uri);

  /* clear the filename if we get a NULL (is that possible?) */
  if (location == NULL) {
    src->filename = NULL;
    src->uri = NULL;
  } else {
    /* we store the filename as received by the application. On Windows this
     * should be UTF8 */
    src->filename = g_strdup (location);
    /* NULL on conversion failure; stored as-is and reported via GST_INFO */
    src->uri = gst_filename_to_uri (location, NULL);
    GST_INFO ("filename : %s", src->filename);
    GST_INFO ("uri : %s", src->uri);
  }
  g_object_notify (G_OBJECT (src), "location");
  /* keep the URI handler in sync with the new location */
  gst_uri_handler_new_uri (GST_URI_HANDLER (src), src->uri);

  return TRUE;

  /* ERROR */
wrong_state:
  {
    g_warning ("Changing the `location' property on filesrc when a file is "
        "open is not supported.");
    /* still holding the object lock from the check above */
    GST_OBJECT_UNLOCK (src);
    return FALSE;
  }
}
/* Append @filename to @playlist as one or more URIs.  URIs are added
 * verbatim, directories are recursed into with entries sorted via
 * compare(), and plain filenames are converted to file:// URIs. */
static void
add_to_playlist (GPtrArray * playlist, const gchar * filename)
{
  GDir *dir;
  gchar *uri;

  if (gst_uri_is_valid (filename)) {
    g_ptr_array_add (playlist, g_strdup (filename));
    return;
  }

  dir = g_dir_open (filename, 0, NULL);
  if (dir != NULL) {
    const gchar *name;
    GList *walk, *sorted = NULL;

    /* collect all entries sorted so the playlist order is stable */
    while ((name = g_dir_read_name (dir)) != NULL) {
      sorted = g_list_insert_sorted (sorted,
          g_build_filename (filename, name, NULL), compare);
    }
    g_dir_close (dir);

    for (walk = sorted; walk != NULL; walk = walk->next) {
      add_to_playlist (playlist, (gchar *) walk->data);
      g_free (walk->data);
    }
    g_list_free (sorted);
    return;
  }

  uri = gst_filename_to_uri (filename, NULL);
  if (uri == NULL) {
    g_warning ("Could not make URI out of filename '%s'", filename);
    return;
  }
  g_ptr_array_add (playlist, uri);
}
// Load a movie by name: plain filenames are resolved through the data
// path and converted to file:// URIs; anything with a scheme other than
// file:// is treated as a network stream.
//
// Fixes: the original called g_free() on a GError* (must be
// g_error_free(), otherwise the domain/message are leaked), and on
// conversion failure assigned the NULL result to a std::string, which is
// undefined behavior.  We now report the error and fail the load.
bool ofGstVideoPlayer::load(string name){
	if( name.find( "file://",0 ) != string::npos){
		bIsStream = bAsyncLoad;
	}else if( name.find( "://",0 ) == string::npos){
		GError * err = NULL;
		gchar * uri = gst_filename_to_uri(ofToDataPath(name).c_str(), &err);
		if(err != NULL){
			ofLogError("ofGstVideoPlayer") << "load(): couldn't create URI for \"" << name << "\": " << err->message;
			g_error_free(err);	// not g_free(): releases message + struct
			return false;
		}
		name = uri;
		g_free(uri);		// gst_filename_to_uri() result must be freed
		//name = ofToDataPath(name);
		bIsStream = bAsyncLoad;
	}else{
		bIsStream = true;
	}
	ofLogVerbose("ofGstVideoPlayer") << "loadMovie(): loading \"" << name << "\"";

	if(isInitialized()){
		// pipeline already exists: rewind it to READY and swap the URI
		gst_element_set_state (videoUtils.getPipeline(), GST_STATE_READY);
		if(!bIsStream){
			// block until the state change completes for local files
			gst_element_get_state (videoUtils.getPipeline(), NULL, NULL, -1);
		}
		internalPixelFormat = OF_PIXELS_NATIVE;
		bIsAllocated = false;
		videoUtils.reallocateOnNextFrame();
		g_object_set(G_OBJECT(videoUtils.getPipeline()), "uri", name.c_str(), (void*)NULL);
		gst_element_set_state (videoUtils.getPipeline(), GST_STATE_PAUSED);
		if(!bIsStream){
			gst_element_get_state (videoUtils.getPipeline(), NULL, NULL, -1);
			return allocate();
		}else{
			return true;
		}
	}else{
		// first load: spin up the main loop and build a fresh pipeline
		ofGstUtils::startGstMainLoop();
		return createPipeline(name) &&
				videoUtils.startPipeline() &&
				(bIsStream || allocate());
	}
}
/* Discover the same file @num times with one GstDiscoverer instance to
 * verify the discoverer can be reused synchronously. */
static void
test_disco_sync_reuse (const gchar * test_fn, guint num, GstClockTime timeout)
{
  GError *err = NULL;
  GstDiscoverer *dc;
  gchar *uri, *path;
  int i;

  dc = gst_discoverer_new (timeout, &err);
  fail_unless (dc != NULL);
  fail_unless (err == NULL);

  /* GST_TEST_FILE comes from makefile CFLAGS */
  path = g_build_filename (GST_TEST_FILES_PATH, test_fn, NULL);
  uri = gst_filename_to_uri (path, &err);
  g_free (path);
  fail_unless (err == NULL);

  for (i = 0; i < num; ++i) {
    GstDiscovererInfo *info;

    GST_INFO ("[%02d] discovering uri '%s'", i, uri);
    info = gst_discoverer_discover_uri (dc, uri, &err);
    if (info != NULL) {
      GstDiscovererResult result = gst_discoverer_info_get_result (info);

      GST_INFO ("result: %d", result);
      gst_discoverer_info_unref (info);
    }
    /* in case we don't have some of the elements needed */
    if (err != NULL) {
      g_error_free (err);
      err = NULL;
    }
  }

  g_free (uri);
  g_object_unref (dc);
}
/* Play-button handler: reads the location from the UI entry, rewinds the
 * pipeline to READY, points the source at the new location/URI and starts
 * playback. */
G_MODULE_EXPORT void
do_button_play_clicked (GtkButton * button, gpointer data)
{
  gchar *uri;
  const gchar *content = gtk_entry_get_text (main_window_sub_widget.entry1);

  g_strlcpy (g_filename, content, MAX_PATH);

  gst_element_set_state (gst_data.playbin, GST_STATE_READY);

#if (TRANS_TYPE == TRANS_TYPE_TCP)
  /* TCP transport takes a raw location rather than a URI */
  uri = g_strdup (g_filename);
  g_object_set (gst_data.source, "location", uri, NULL);
#else
  uri = gst_uri_is_valid (g_filename) ?
      g_strdup (g_filename) : gst_filename_to_uri (g_filename, NULL);
  g_object_set (gst_data.source, "uri", uri, NULL);
#endif

  g_print ("%s: %s", __FUNCTION__, uri);
  g_free (uri);

  gst_element_set_state (gst_data.playbin, GST_STATE_PLAYING);
}
/* Build and play a simple GES audio timeline: every command-line argument
 * becomes a one-second clip appended to a single audio layer, then the
 * timeline is played through a convenience pipeline.
 *
 * Returns 0 on success, -1 on setup failure.
 */
int
main (int argc, gchar ** argv)
{
  GESTimelinePipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka;
  GESTimelineLayer *layer;
  GMainLoop *mainloop;
  guint i;

  if (argc < 2) {
    g_print ("Usage: %s <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Initialize GStreamer (this will parse environment variables and
   * commandline arguments). */
  gst_init (&argc, &argv);

  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Setup of an audio timeline */

  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();

  tracka = ges_track_audio_raw_new ();

  /* We are only going to be doing one layer of timeline objects */
  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, tracka))
    return -1;

  /* Here we've finished initializing our timeline, we're
   * ready to start using it... by solely working with the layer ! */

  for (i = 1; i < argc; i++) {
    gchar *uri = gst_filename_to_uri (argv[i], NULL);
    GESTimelineFileSource *src = ges_timeline_filesource_new (uri);

    g_assert (src);
    g_free (uri);

    g_object_set (src, "duration", GST_SECOND, NULL);
    /* Since we're using a GESSimpleTimelineLayer, objects will be
     * automatically appended to the end of the layer */
    ges_timeline_layer_add_object (layer, (GESTimelineObject *) src);
  }

  /* In order to view our timeline, let's grab a convenience pipeline to put
   * our timeline in. */
  pipeline = ges_timeline_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_timeline_pipeline_add_timeline (pipeline, timeline))
    return -1;

  /* The following is standard usage of a GStreamer pipeline (note how you
   * haven't had to care about GStreamer so far ?).
   *
   * We set the pipeline to playing ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* .. and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running
   * in order to function properly ! */
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Quit the mainloop after one second per clip (argc - 1 seconds), i.e.
   * once the whole timeline has played through.
   * NOTE(review): g_main_loop_quit() is cast to GSourceFunc although it
   * returns void; this works in practice only because the loop quits
   * before the source would be dispatched again — confirm before reuse. */
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop);

  g_main_loop_run (mainloop);

  return 0;
}
// Build the uridecodebin -> queue -> videoconvert -> capsfilter ->
// videoscale -> appsink pipeline for @filename and start playback.
//
// Fixes over the original:
//  - 'return -1' from a bool function converted to true (reported success
//    on failure); now returns false.
//  - the URI pointer came from a temporary QByteArray (toUtf8().constData()),
//    i.e. it dangled immediately; we keep the byte array alive and take an
//    owned copy.
//  - gst_object_unref() was called on a plain string on the error path;
//    a string is not a GstObject.
//  - the converted URI was never freed after g_object_set() (leak).
//  - _videoQueue was used in the NULL-check, bin and links but its creation
//    was commented out; it is now created.
bool MediaImpl::loadMovie(QString filename)
{
  _uri = filename;

  qDebug() << "Opening movie: " << filename << ".";

  this->_frame = NULL;

  // Free previously allocated structures
  unloadMovie();

  // Initialize GStreamer.
  gst_init (NULL, NULL);

  GstElement *capsFilter = NULL;
  GstElement *videoScale = NULL;

  // Create the elements.
  _source          = gst_element_factory_make ("uridecodebin", "source");
  _videoQueue      = gst_element_factory_make ("queue", "vqueue");
  _videoColorSpace = gst_element_factory_make ("videoconvert", "vcolorspace");
  videoScale       = gst_element_factory_make ("videoscale", "videoscale0");
  capsFilter       = gst_element_factory_make ("capsfilter", "capsfilter0");
  _videoSink       = gst_element_factory_make ("appsink", "vsink");

  // Prepare handler data.
  _padHandlerData.videoToConnect = _videoQueue;
  _padHandlerData.videoSink = _videoSink;
  _padHandlerData.videoIsConnected = false;

  // Create the empty pipeline.
  _pipeline = gst_pipeline_new ( "video-source-pipeline" );

  if (!_pipeline || !_source || !_videoQueue || !_videoColorSpace ||
      !videoScale || !capsFilter || !_videoSink)
  {
    g_printerr ("Not all elements could be created.\n");
    unloadMovie();
    return false;   // was 'return -1', which converts to true for a bool
  }

  // Build the pipeline. Note that we are NOT linking the source at this
  // point. We will do it later.
  gst_bin_add_many (GST_BIN (_pipeline), _source, _videoQueue,
      _videoColorSpace, videoScale, capsFilter, _videoSink, NULL);

  if (!gst_element_link_many (_videoQueue, _videoColorSpace, capsFilter,
          videoScale, _videoSink, NULL))
  {
    g_printerr ("Video elements could not be linked.\n");
    unloadMovie();
    return false;
  }

  // Process URI. toUtf8() returns a temporary QByteArray: keep it alive
  // while we use its data pointer, and take an owned copy of the URI.
  QByteArray utf8 = filename.toUtf8();
  gchar* uri = NULL;
  if (gst_uri_is_valid(utf8.constData()))
  {
    uri = g_strdup(utf8.constData());
  }
  else
  {
    // Try to convert filename to URI.
    GError* error = NULL;
    uri = gst_filename_to_uri(utf8.constData(), &error);
    if (error)
    {
      qDebug() << "Filename to URI error: " << error->message;
      g_error_free(error);
      freeResources();
      return false;
    }
  }

  // Set URI to be played.
  qDebug() << "URI for uridecodebin: " << uri;
  // FIXME: sometimes it's just the path to the directory that is given, not the file itself.
  g_object_set (_source, "uri", uri, NULL);
  g_free (uri);   // g_object_set() copies the string; plug the leak

  // Connect to the pad-added signal
  g_signal_connect (_source, "pad-added",
      G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);

  // Configure video appsink.
  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
  g_object_set (capsFilter, "caps", videoCaps, NULL);
  g_object_set (_videoSink, "emit-signals", TRUE,
      "max-buffers", 1, // only one buffer (the last) is maintained in the queue
      "drop", TRUE,     // ... other buffers are dropped
      NULL);
  g_signal_connect (_videoSink, "new-sample",
      G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
  gst_caps_unref (videoCaps);

  // Listen to the bus.
  _bus = gst_element_get_bus (_pipeline);

  // Start playing.
  if (!setPlayState(true))
    return false;

  qDebug() << "Pipeline started.";

  return true;
}
/* Test Callbacks and vmethods*/

/* Build the subtitle test pipeline:
 *
 *   videotestsrc -> capsfilter(RGB 1920x1080) -> subtitleoverlay ->
 *     videoconvert -> capsfilter -> fakesink
 *
 * with a uridecodebin feeding the subtitle stream (via pad_added_cb) from
 * the "sublocation" test argument, and a data probe installed on the
 * fakesink sink pad to inspect the rendered frames.
 *
 * Returns the pipeline, or NULL (via the failed path) on any error.
 */
static GstPipeline *
create_pipeline (InsanityGstPipelineTest * ptest, gpointer unused_data)
{
  GstCaps *caps;
  gulong probe_id;
  GError *err = NULL;
  GstIterator *it = NULL;
  gchar *uri = NULL, *sublocation = NULL;
  GstElement *capsfilter = NULL, *capsfilter1 = NULL, *colorspace = NULL,
      *colorspace1 = NULL, *fakesink = NULL;
  GstPad *fakesinksink = NULL, *tmppad = NULL;
  InsanityTest *test = INSANITY_TEST (ptest);

  SUBTITLES_TEST_LOCK ();

  glob_pipeline = GST_ELEMENT (gst_pipeline_new ("pipeline"));

  /* Create the source */
  insanity_test_get_boolean_argument (test, "push-mode",
      (gboolean *) & glob_push_mode);
  insanity_test_get_string_argument (test, "sublocation", &sublocation);
  if (sublocation == NULL || g_strcmp0 (sublocation, "") == 0) {
    ERROR (test, "Location name not set\n");
    goto creation_failed;
  }

  uri = gst_filename_to_uri (sublocation, &err);
  if (err != NULL) {
    ERROR (test, "Error creating uri %s", err->message);
    goto creation_failed;
  }

  /* In push mode, prefix the uri so that pushfilesrc handles it */
  if (glob_push_mode == TRUE) {
    gchar *tmpuri;

    /* NOTE(review): this pushfilesrc element is immediately overwritten by
     * the uridecodebin below and never added to a bin — it looks like a
     * leaked floating ref.  Confirm whether creating it has a side effect
     * (plugin registration?) before removing it. */
    glob_uridecodebin = gst_element_factory_make ("pushfilesrc", "src");
    tmpuri = g_strconcat ("push", uri, NULL);
    g_free (uri);

    uri = tmpuri;
  }

  glob_uridecodebin = gst_element_factory_make ("uridecodebin", "src");
  g_signal_connect (glob_uridecodebin, "pad-added",
      G_CALLBACK (pad_added_cb), test);
  g_object_set (glob_uridecodebin, "uri", uri, NULL);

  /* the subtitleoverlay */
  glob_suboverlay =
      gst_element_factory_make ("subtitleoverlay", "subtitleoverlay");
  if (glob_suboverlay == NULL)
    goto creation_failed;

  /* the fakesink */
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  if (fakesink == NULL)
    goto creation_failed;

  /* and the videotestsrc */
  glob_videotestsrc = gst_element_factory_make ("videotestsrc",
      "videotestsrc");
  if (glob_videotestsrc == NULL)
    goto creation_failed;
  g_object_set (glob_videotestsrc, "pattern", 2, "do-timestamp", TRUE, NULL);

  /* Make sure the video is big enough */
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter == NULL)
    goto creation_failed;

  gst_video_info_init (&glob_video_info);
  gst_video_info_set_format (&glob_video_info, GST_VIDEO_FORMAT_RGB, 1920,
      1080);
  caps = gst_video_info_to_caps (&glob_video_info);
  /* NOTE(review): 'caps' is never unreffed in this function — presumably a
   * small one-off leak; verify and add gst_caps_unref if so. */
  g_object_set (capsfilter, "caps", caps, NULL);

  capsfilter1 = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter1 == NULL)
    goto creation_failed;

  /* We want the last frame that we will "parse" to check if it contains
   * subtitles to be in RGB to make simpler for us */
  g_object_set (capsfilter1, "caps", caps, NULL);

  colorspace = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace == NULL)
    goto creation_failed;

  colorspace1 = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace1 == NULL)
    goto creation_failed;

  /* Now add to the pipeline */
  gst_bin_add_many (GST_BIN (glob_pipeline), glob_uridecodebin,
      glob_videotestsrc, capsfilter, glob_suboverlay,
      capsfilter1, colorspace, colorspace1, fakesink, NULL);

  /* link video branch elements */
  gst_element_link_many (glob_videotestsrc, capsfilter, glob_suboverlay,
      colorspace, capsfilter1, fakesink, NULL);

  /* And install a probe to the subtitleoverlay src pad */
  fakesinksink = gst_element_get_static_pad (fakesink, "sink");
  if (fakesinksink == NULL)
    goto failed;

  if (insanity_gst_test_add_data_probe (INSANITY_GST_TEST (test),
          GST_BIN (glob_pipeline), GST_OBJECT_NAME (fakesink),
          GST_OBJECT_NAME (fakesinksink), &tmppad, &probe_id,
          &probe_cb, NULL, NULL) == TRUE) {

    glob_suboverlay_src_probe = g_slice_new0 (ProbeContext);
    glob_suboverlay_src_probe->probe_id = probe_id;
    glob_suboverlay_src_probe->pad = fakesinksink;
    glob_suboverlay_src_probe->element = fakesink;
    glob_suboverlay_src_probe->test = test;
    glob_suboverlay_src_probe->waiting_first_segment = TRUE;

    insanity_test_validate_checklist_item (test, "install-probes", TRUE, NULL);
  } else {
    insanity_test_validate_checklist_item (test, "install-probes", FALSE,
        "Failed to attach probe to fakesink");
    insanity_test_done (test);
    goto failed;
  }

  g_signal_connect (GST_CHILD_PROXY (glob_suboverlay), "child-added",
      G_CALLBACK (suboverlay_child_added_cb), test);

done:
  /* common exit: release the lock and all temporaries */
  SUBTITLES_TEST_UNLOCK ();

  g_free (uri);
  g_free (sublocation);

  if (err != NULL)
    g_error_free (err);

  if (it != NULL)
    gst_iterator_free (it);

  return GST_PIPELINE (glob_pipeline);

failed:
  /* tear down the whole pipeline (owns whatever was added to it) */
  if (glob_pipeline != NULL)
    gst_object_unref (glob_pipeline);

  glob_suboverlay = glob_pipeline = glob_videotestsrc =
      glob_uridecodebin = NULL;

  goto done;

creation_failed:
  /* elements not yet added to the bin must be unreffed individually */
  if (glob_uridecodebin != NULL)
    gst_object_unref (glob_uridecodebin);
  if (glob_suboverlay != NULL)
    gst_object_unref (glob_suboverlay);
  if (glob_videotestsrc != NULL)
    gst_object_unref (glob_videotestsrc);
  if (fakesink != NULL)
    gst_object_unref (fakesink);

  goto failed;
}
bool ofGstVideoPlayer::loadMovie(string name){ close(); if( name.find( "file://",0 ) != string::npos){ bIsStream = false; }else if( name.find( "://",0 ) == string::npos){ GError * err = NULL; name = gst_filename_to_uri(ofToDataPath(name).c_str(),&err); bIsStream = false; }else{ bIsStream = true; } ofLogVerbose("ofGstVideoPlayer") << "loadMovie(): loading \"" << name << "\""; ofGstUtils::startGstMainLoop(); #if GST_VERSION_MAJOR==0 GstElement * gstPipeline = gst_element_factory_make("playbin2","player"); #else GstElement * gstPipeline = gst_element_factory_make("playbin","player"); #endif g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL); // create the oF appsink for video rgb without sync to clock GstElement * gstSink = gst_element_factory_make("appsink", "app_sink"); gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true); gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink), 8); gst_app_sink_set_drop (GST_APP_SINK(gstSink),true); gst_base_sink_set_max_lateness (GST_BASE_SINK(gstSink), -1); #if GST_VERSION_MAJOR==0 GstCaps *caps; int bpp; switch(internalPixelFormat){ case OF_PIXELS_MONO: bpp = 8; caps = gst_caps_new_simple("video/x-raw-gray", "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 8, NULL); break; case OF_PIXELS_RGB: bpp = 24; caps = gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 24, "endianness",G_TYPE_INT,4321, "red_mask",G_TYPE_INT,0xff0000, "green_mask",G_TYPE_INT,0x00ff00, "blue_mask",G_TYPE_INT,0x0000ff, NULL); break; case OF_PIXELS_RGBA: bpp = 32; caps = gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 32, "endianness",G_TYPE_INT,4321, "red_mask",G_TYPE_INT,0xff000000, "green_mask",G_TYPE_INT,0x00ff0000, "blue_mask",G_TYPE_INT,0x0000ff00, "alpha_mask",G_TYPE_INT,0x000000ff, NULL); case OF_PIXELS_BGRA: bpp = 32; caps = gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 32, "endianness",G_TYPE_INT,4321, 
"red_mask",G_TYPE_INT,0x0000ff00, "green_mask",G_TYPE_INT,0x00ff0000, "blue_mask",G_TYPE_INT,0xff000000, "alpha_mask",G_TYPE_INT,0x000000ff, NULL); break; default: bpp = 32; caps = gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 24, "endianness",G_TYPE_INT,4321, "red_mask",G_TYPE_INT,0xff0000, "green_mask",G_TYPE_INT,0x00ff00, "blue_mask",G_TYPE_INT,0x0000ff, NULL); break; } #else int bpp; string mime="video/x-raw"; string format; switch(internalPixelFormat){ case OF_PIXELS_MONO: format = "GRAY8"; bpp = 8; break; case OF_PIXELS_RGB: format = "RGB"; bpp = 24; break; case OF_PIXELS_RGBA: format = "RGBA"; bpp = 32; break; case OF_PIXELS_BGRA: format = "BGRA"; bpp = 32; break; default: format = "RGB"; bpp=24; break; } GstCaps *caps = gst_caps_new_simple(mime.c_str(), "format", G_TYPE_STRING, format.c_str(), /*"bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, 24, "endianness",G_TYPE_INT,4321, "red_mask",G_TYPE_INT,0xff0000, "green_mask",G_TYPE_INT,0x00ff00, "blue_mask",G_TYPE_INT,0x0000ff, "alpha_mask",G_TYPE_INT,0x000000ff,*/ NULL); #endif gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps); gst_caps_unref(caps); if(threadAppSink){ GstElement * appQueue = gst_element_factory_make("queue","appsink_queue"); g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL); GstElement* appBin = gst_bin_new("app_bin"); gst_bin_add(GST_BIN(appBin), appQueue); GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink"); GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad); gst_object_unref(appQueuePad); gst_element_add_pad(appBin, ghostPad); gst_bin_add_many(GST_BIN(appBin), gstSink, NULL); gst_element_link_many(appQueue, gstSink, NULL); g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL); }else{ g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL); } #ifdef TARGET_WIN32 GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL); g_object_set 
(G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL); #endif videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream); videoUtils.startPipeline(); if(!bIsStream) return allocate(bpp); else return true; }
// Build the playback pipeline for @filename: either a shmsrc/gdpdepay
// chain (shared-memory mode) or a uridecodebin, feeding a video branch
// (queue -> videoconvert -> capsfilter -> videoscale -> appsink).
//
// Fixes over the original:
//  - filetestpath and the initial uri came from temporary QByteArrays
//    (toUtf8().constData() / toLocal8Bit().data()); the pointers dangled.
//    We now keep the byte arrays alive for as long as the pointer is used.
//  - gst_uri_is_valid() was called with uri == NULL.
//  - gst_object_unref() was called on a plain string on the error path.
//  - in shared-memory mode, g_free() was called on memory owned by a
//    QByteArray (heap corruption); we no longer free what we don't own.
//  - 'return -1' from a bool function converted to true; now false.
bool MediaImpl::loadMovie(QString filename)
{
  // Keep the UTF-8 bytes alive: toUtf8() returns a temporary QByteArray
  // whose data pointer must not outlive it.
  QByteArray pathBytes = filename.toUtf8();

  if (FALSE == g_file_test(pathBytes.constData(), G_FILE_TEST_EXISTS))
  {
    std::cout << "File " << pathBytes.constData() << " does not exist" << std::endl;
    return false;
  }

  _uri = filename;

  qDebug() << "Opening movie: " << filename << ".";

  // Free previously allocated structures
  unloadMovie();

  // Initialize GStreamer.
  GstElement *capsfilter0 = NULL;
  GstElement *videoscale0 = NULL;

  // Create the elements.
  if (_isSharedMemorySource)
  {
    _shmsrc0 = gst_element_factory_make ("shmsrc", "shmsrc0");
    _gdpdepay0 = gst_element_factory_make ("gdpdepay", "gdpdepay0");
    // poll the shm socket periodically so we notice the writer appearing
    _pollSource = g_timeout_source_new (500);
    g_source_set_callback (_pollSource, gstPollShmsrc, this, NULL);
    g_source_attach (_pollSource, g_main_context_default());
    g_source_unref (_pollSource);
  }
  else
  {
    _uridecodebin0 = gst_element_factory_make ("uridecodebin", "uridecodebin0");
  }
  _queue0 = gst_element_factory_make ("queue", "queue0");
  _videoconvert0 = gst_element_factory_make ("videoconvert", "videoconvert0");
  videoscale0 = gst_element_factory_make ("videoscale", "videoscale0");
  capsfilter0 = gst_element_factory_make ("capsfilter", "capsfilter0");
  _appsink0 = gst_element_factory_make ("appsink", "appsink0");

  // Prepare handler data.
  _padHandlerData.videoToConnect = _queue0;
  _padHandlerData.videoSink = _appsink0;
  _padHandlerData.videoIsConnected = false;

  _audioqueue0 = gst_element_factory_make ("queue", "audioqueue0");
  _audioconvert0 = gst_element_factory_make ("audioconvert", "audioconvert0");
  _audioresample0 = gst_element_factory_make ("audioresample", "audioresample0");
  _audiovolume0 = gst_element_factory_make ("volume", "audiovolume0");
  _audiosink0 = gst_element_factory_make ("autoaudiosink", "audiosink0");
  _padHandlerData.audioToConnect = _audioqueue0;

  // Create the empty pipeline.
  _pipeline = gst_pipeline_new ( "video-source-pipeline" );

  if (!_pipeline || !_queue0 || !_videoconvert0 || !videoscale0 ||
      !capsfilter0 || !_appsink0 || !_audioqueue0 || !_audioconvert0 ||
      !_audioresample0 || !_audiovolume0 || !_audiosink0)
  {
    g_printerr ("Not all elements could be created.\n");
    if (! _pipeline) g_printerr("_pipeline");
    if (! _queue0) g_printerr("_queue0");
    if (! _videoconvert0) g_printerr("_videoconvert0");
    if (! videoscale0) g_printerr("videoscale0");
    if (! capsfilter0) g_printerr("capsfilter0");
    if (! _appsink0) g_printerr("_appsink0");
    if (! _audioqueue0) g_printerr("_audioqueue0");
    if (! _audioconvert0) g_printerr("_audioconvert0");
    if (! _audioresample0) g_printerr("_audioresample0");
    if (! _audiovolume0) g_printerr("_audiovolume0");
    if (! _audiosink0) g_printerr("_audiosink0");
    unloadMovie();
    return false;   // was 'return -1', which converts to true for a bool
  }

  if (_isSharedMemorySource)
  {
    if (! _shmsrc0 || ! _gdpdepay0)
    {
      g_printerr ("Not all elements could be created.\n");
      if (! _shmsrc0) g_printerr("_shmsrc0");
      if (! _gdpdepay0) g_printerr("_gdpdepay0");
      unloadMovie();
      return false;
    }
  }
  else if (! _uridecodebin0)
  {
    g_printerr ("Not all elements could be created.\n");
    g_printerr("_uridecodebin0");
    unloadMovie();
    return false;
  }

  // Build the pipeline. Note that we are NOT linking the source at this
  // point. We will do it later.
  gst_bin_add_many (GST_BIN (_pipeline),
    _isSharedMemorySource ? _shmsrc0 : _uridecodebin0, _queue0,
    _videoconvert0, videoscale0, capsfilter0, _appsink0, NULL);

  // special case for shmsrc
  if (_isSharedMemorySource)
  {
    gst_bin_add (GST_BIN(_pipeline), _gdpdepay0);
    if (! gst_element_link_many (_shmsrc0, _gdpdepay0, _queue0, NULL))
    {
      g_printerr ("Could not link shmsrc, deserializer and video queue.\n");
    }
  }
  // link uridecodebin -> queue will be performed by callback

  if (! gst_element_link_many (_queue0, _videoconvert0, capsfilter0,
        videoscale0, _appsink0, NULL))
  {
    g_printerr ("Could not link video queue, colorspace converter, caps filter, scaler and app sink.\n");
    unloadMovie();
    return false;
  }

  // Process URI / socket path.
  if (_isSharedMemorySource)
  {
    // shmsrc takes a socket path, not a URI.  The QByteArray keeps the
    // bytes alive for the duration of the call; g_object_set() copies
    // them, and we must never free QByteArray-owned memory ourselves.
    QByteArray ba = filename.toLocal8Bit();
    g_object_set (_shmsrc0, "socket-path", ba.constData(), NULL);
    g_object_set (_shmsrc0, "is-live", TRUE, NULL);
    _padHandlerData.videoIsConnected = true;
  }
  else
  {
    gchar* uri = NULL;
    if (gst_uri_is_valid(pathBytes.constData()))
    {
      uri = g_strdup(pathBytes.constData());
    }
    else
    {
      // Try to convert filename to URI.
      GError* error = NULL;
      qDebug() << "Calling gst_filename_to_uri : " << filename;
      uri = gst_filename_to_uri(pathBytes.constData(), &error);
      if (error)
      {
        qDebug() << "Filename to URI error: " << error->message;
        g_error_free(error);
        freeResources();
        return false;
      }
    }

    // Set URI to be played.
    qDebug() << "URI for uridecodebin: " << uri;
    // FIXME: sometimes it's just the path to the directory that is given, not the file itself.

    // Connect to the pad-added signal
    g_signal_connect (_uridecodebin0, "pad-added",
        G_CALLBACK (MediaImpl::gstPadAddedCallback), &_padHandlerData);
    g_object_set (_uridecodebin0, "uri", uri, NULL);
    g_free(uri);  // only free what we allocated ourselves
  }

  // Configure video appsink.
  GstCaps *videoCaps = gst_caps_from_string ("video/x-raw,format=RGBA");
  g_object_set (capsfilter0, "caps", videoCaps, NULL);
  g_object_set (_appsink0, "emit-signals", TRUE,
    "max-buffers", 1, // only one buffer (the last) is maintained in the queue
    "drop", TRUE,     // ... other buffers are dropped
    "sync", TRUE,
    NULL);
  g_signal_connect (_appsink0, "new-sample",
      G_CALLBACK (MediaImpl::gstNewSampleCallback), this);
  gst_caps_unref (videoCaps);

  // Audio is wired but muted for now.
  g_object_set (_audiovolume0, "mute", false, NULL);
  g_object_set (_audiovolume0, "volume", 0.0, NULL);

  // Listen to the bus.
  _bus = gst_element_get_bus (_pipeline);

  // Start playing.
  if (! _isSharedMemorySource && ! setPlayState(true))
  {
    return false;
  }

  return true;
}
/**
 * gst_flite_src_set_location:
 * @src: the flite source element to update
 * @location: query string of the form "lang=<code>&text=<utf8 text>"
 *   (either key may come first), or NULL to clear the current file
 *
 * Hands the requested text to an external festival/TTS server through a
 * pair of request/response text files, waits for the server to acknowledge
 * the request timestamp, then points @src at the generated WAV file.
 * The element must be in READY or NULL state.
 *
 * Returns: TRUE on success, FALSE if the element is in the wrong state.
 */
static gboolean
gst_flite_src_set_location (GstFliteSrc * src, const gchar * location)
{
  GstState state;
  gchar *input_uri;
  gchar **input_params;
  gchar *flite_exec_path;
  /* BUG FIX: these were uninitialized; if neither strcasecmp() below
   * matched, they were read as garbage (undefined behavior).  Default to
   * the "lang comes first" layout. */
  int lang_offset = 1;
  int text_offset = 3;

  /* the element must be stopped in order to do this */
  GST_OBJECT_LOCK (src);
  state = GST_STATE (src);
  if (state != GST_STATE_READY && state != GST_STATE_NULL)
    goto wrong_state;
  GST_OBJECT_UNLOCK (src);

  g_free (src->filename);
  g_free (src->uri);
  input_uri = g_uri_unescape_string (location, NULL);

  /* clear the filename if we get a NULL.
   * BUG FIX: the old test (location == NULL && input_uri) could never be
   * true because g_uri_unescape_string (NULL, ...) returns NULL, so the
   * clear path was dead code. */
  if (location == NULL) {
    src->filename = NULL;
    src->uri = NULL;
    g_free (input_uri);
  } else {
    guint n_params;

    /* we store the filename as received by the application. On Windows this
     * should be UTF8 */
    /* sample -> ?lang=hi&text=<yada yada> */
    GST_WARNING ("Input uri: %s", input_uri);
    input_params = g_strsplit_set (input_uri, "=&", -1);
    n_params = g_strv_length (input_params);

    /* BUG FIX: the old checks compared (input_params + N) against NULL,
     * which is always true for pointer arithmetic, and then dereferenced
     * elements 1..3 without knowing the array is that long.  Use the
     * actual element count instead. */
    if (n_params >= 1 && strcasecmp (input_params[0], "lang") == 0) {
      lang_offset = 1;
      text_offset = 3;
    } else if (n_params >= 3 && strcasecmp (input_params[2], "lang") == 0) {
      lang_offset = 3;
      text_offset = 1;
    }

    /* create a unique-ish output filename from the current microseconds */
    struct timeval tim;
    gettimeofday (&tim, NULL);
    double timestamp = tim.tv_usec;
    gchar *filename =
        g_strdup_printf ("/extra_hdd/gen-files/%G.wav", timestamp);
    flite_exec_path =
        g_strdup ("/home/ubuntu/gst-home/gst-rtsp-server/festival_interface");

    /* BUG FIX: only touch the lang/text elements when they really exist */
    if ((guint) text_offset < n_params && (guint) lang_offset < n_params) {
      FILE *fp, *fpout;
      gchar *req_path, *resp_path;

      rtrim (input_params[text_offset]);
      ltrim (input_params[text_offset]);
      strip (input_params[text_offset]);

      /* BUG FIX: build per-language file names with g_strdup_printf
       * instead of strcat into fixed 100/110-byte stack buffers, which
       * could overflow for long language codes. */
      req_path = g_strdup_printf (
          "/extra_hdd/tts-home/festival/festival-server/%s.txt",
          input_params[lang_offset]);
      resp_path = g_strdup_printf (
          "/extra_hdd/tts-home/festival/festival-server/%s-out.txt",
          input_params[lang_offset]);

      /* append the request line "<text>!@#@@<timestamp>" for the server.
       * BUG FIX: check fopen() before writing (was a NULL deref on
       * failure). */
      fp = fopen (req_path, "a+");
      if (fp != NULL) {
        fprintf (fp, "%s!@#@@%G\n", input_params[text_offset], timestamp);
        fclose (fp);
      } else {
        GST_WARNING ("could not open request file %s", req_path);
      }

      /* busy-wait until the server echoes our timestamp back.
       * BUG FIX: bound the %s conversion (was an unbounded read into a
       * 100-byte buffer) and break on EOF/parse failure instead of
       * spinning forever on a truncated response file. */
      fpout = fopen (resp_path, "r");
      if (fpout != NULL) {
        char out_token[100];
        char *endp;
        while (fscanf (fpout, "%99s", out_token) == 1) {
          long ret = strtol (out_token, &endp, 10);
          if ((double) ret == timestamp)
            break;
        }
        fclose (fpout);
      } else {
        GST_WARNING ("could not open response file %s", resp_path);
      }

      g_free (req_path);
      g_free (resp_path);
    }

    GST_WARNING ("call done, now play wav");
    GST_WARNING ("Filename :%s", filename);
    src->filename = g_strdup (filename);
    g_free (filename);          /* BUG FIX: was leaked */
    g_free (input_uri);
    /* BUG FIX: g_strsplit_set() allocates every element; plain g_free()
     * released only the array and leaked the strings. */
    g_strfreev (input_params);
    g_free (flite_exec_path);
    /* NOTE(review): the uri is still derived from the raw location, not
     * from the generated wav filename — kept as-is to preserve behavior,
     * but this looks suspicious; verify against callers. */
    src->uri = gst_filename_to_uri (location, NULL);
    GST_INFO ("filename : %s", src->filename);
    GST_INFO ("uri : %s", src->uri);
  }

  g_object_notify (G_OBJECT (src), "location");
  /* FIXME 2.0: notify "uri" property once there is one */
  return TRUE;

  /* ERROR */
wrong_state:
  {
    g_warning ("Changing the `location' property on filesrc when a file is "
        "open is not supported.");
    GST_OBJECT_UNLOCK (src);
    return FALSE;
  }
}
/* Test Callbacks and vmethods*/ static GstPipeline * create_pipeline (InsanityGstPipelineTest * ptest, gpointer unused_data) { GstElementFactory *decofactory = NULL; GError *err = NULL; InsanityTest *test = INSANITY_TEST (ptest); gchar *decodername = NULL, *uri = NULL, *location = NULL; const gchar *klass; DECODER_TEST_LOCK (); glob_pipeline = GST_ELEMENT (gst_pipeline_new ("pipeline")); /* Create the source */ insanity_test_get_boolean_argument (test, "push-mode", (gboolean *) & glob_push_mode); insanity_test_get_string_argument (test, "location", &location); if (location == NULL || g_strcmp0 (location, "") == 0) { ERROR (test, "Location name not set"); goto failed; } uri = gst_filename_to_uri (location, &err); if (err != NULL) { ERROR (test, "Error creating uri %s", err->message); goto failed; } else if (glob_push_mode == FALSE) { glob_src = gst_element_factory_make ("filesrc", "src"); } else { gchar *tmpuri; glob_src = gst_element_factory_make ("pushfilesrc", "src"); tmpuri = g_strconcat ("push", uri, NULL); g_free (uri); uri = tmpuri; } gst_uri_handler_set_uri (GST_URI_HANDLER (glob_src), uri, &err); if (err != NULL) { ERROR (test, "Error setting uri %s", err->message); goto failed; } if (!insanity_test_get_string_argument (test, "decoder-name", &decodername) || g_strcmp0 (decodername, "") == 0) { ERROR (test, "Decoder name not set"); goto failed; } /* ... create the decoder, will not be used until we typefind and * plug the demuxer */ glob_decoder = gst_element_factory_make (decodername, "decoder"); if (glob_decoder == NULL) goto failed; /* We check wether the element is a parser or not */ decofactory = gst_element_get_factory (glob_decoder); klass = gst_element_factory_get_metadata (decofactory, GST_ELEMENT_METADATA_KLASS); glob_testing_parser = g_strrstr (klass, "Parser") ? 
TRUE : FALSE; if (glob_testing_parser == FALSE && g_strrstr (klass, "Decoder") == NULL) { gchar *val_test = g_strdup_printf ("%s not a decoder nor a parser as" " neither of \"Decoder\" nor \"parser\" where present in the element" " factory klass: %s", decodername, klass); insanity_test_validate_checklist_item (test, "testing-decoder-or-parser", FALSE, val_test); g_free (val_test); goto failed; } else { insanity_test_validate_checklist_item (test, "testing-decoder-or-parser", TRUE, NULL); } if (glob_testing_parser == FALSE) { GstCaps *decode_sinkcaps = NULL; GList *tmp, *parsers; const GList *template; for (template = gst_element_factory_get_static_pad_templates (decofactory);
/*
 * Plays a video file with its audio either kept, muted, or replaced,
 * using a two-layer GES timeline.  Runs for (duration + 1) seconds,
 * then quits the main loop and tears the pipeline down.
 */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka, *trackv;
  GESLayer *layer1, *layer2;
  GESUriClip *src;
  GMainLoop *mainloop;
  gchar *uri;
  gint inpoint = 0, duration = 10;
  gboolean mute = FALSE;
  gchar *audiofile = NULL;
  GOptionEntry options[] = {
    {"inpoint", 'i', 0, G_OPTION_ARG_INT, &inpoint,
        "in-point in the file (in seconds, default:0s)", "seconds"},
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"mute", 'm', 0, G_OPTION_ARG_NONE, &mute,
        "Whether to mute the audio from the file",},
    {"audiofile", 'a', 0, G_OPTION_ARG_FILENAME, &audiofile,
          "Use this audiofile instead of the original audio from the file",
        "audiofile"},
    {NULL}
  };

  ctx = g_option_context_new
      ("- Plays an video file with sound (origin/muted/replaced)");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    g_clear_error (&err);       /* BUG FIX: the GError was leaked */
    exit (1);
  }

  if (argc == 1) {
    /* BUG FIX: g_option_context_get_help() returns an allocated string
     * that was previously leaked */
    gchar *help = g_option_context_get_help (ctx, TRUE, NULL);
    g_print ("%s", help);
    g_free (help);
    exit (0);
  }
  g_option_context_free (ctx);

  ges_init ();

  /* Create an Audio/Video pipeline with two layers */
  pipeline = ges_pipeline_new ();
  timeline = ges_timeline_new ();
  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());
  layer1 = ges_layer_new ();
  layer2 = ges_layer_new ();
  g_object_set (layer2, "priority", 1, NULL);

  if (!ges_timeline_add_layer (timeline, layer1) ||
      !ges_timeline_add_layer (timeline, layer2) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv) ||
      !ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  /* BUG FIX: gst_filename_to_uri() can fail (e.g. relative path that
   * cannot be resolved); the result was used without a NULL check. */
  uri = gst_filename_to_uri (argv[1], &err);
  if (uri == NULL) {
    g_print ("Could not build URI for '%s': %s\n", argv[1],
        err != NULL ? err->message : "unknown error");
    g_clear_error (&err);
    return -1;
  }

  /* Add the main audio/video file */
  src = ges_uri_clip_new (uri);
  g_free (uri);
  if (src == NULL) {
    g_print ("Could not create a clip for '%s'\n", argv[1]);
    return -1;
  }

  /* BUG FIX: "start" is a guint64 property; passing a plain int 0 through
   * g_object_set()'s varargs reads 64 bits from a 32-bit argument, which
   * is undefined behavior.  Cast explicitly. */
  g_object_set (src,
      "start", (guint64) 0,
      "in-point", inpoint * GST_SECOND,
      "duration", duration * GST_SECOND, "mute", mute, NULL);
  ges_layer_add_clip (layer1, GES_CLIP (src));

  /* Play the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  mainloop = g_main_loop_new (NULL, FALSE);
  /* quit after the clip has fully played */
  g_timeout_add_seconds (duration + 1, (GSourceFunc) g_main_loop_quit,
      mainloop);
  g_main_loop_run (mainloop);

  /* shut the pipeline down cleanly before exiting */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  g_main_loop_unref (mainloop);

  return 0;
}