/* Tear down the playback pipeline and reset every member to its initial,
 * stopped state. Safe to call when nothing is loaded. */
void GstVideoPlayerBackend::clear()
{
    if (m_sink)
        g_object_unref(m_sink);

    /* Stream doesn't support audio. The audio elements have been unlinked from
     * the bus, but they still are in PAUSED state.
     * Set their state to NULL to avoid a warning on disposing. */
    if (!m_hasAudio && m_audioDecoder)
    {
        gst_element_set_state(m_audioDecoder, GST_STATE_NULL);
        gst_element_set_state(m_audioQueue, GST_STATE_NULL);
        gst_element_set_state(m_audioLink, GST_STATE_NULL);
        gst_element_set_state(m_audioResample, GST_STATE_NULL);
        gst_element_set_state(m_volumeController, GST_STATE_NULL);
        gst_element_set_state(m_audioSink, GST_STATE_NULL);
    }

    if (m_pipeline)
    {
        qDebug("gstreamer: Destroying pipeline");

        /* Disable the message handlers to avoid anything calling back into this instance */
        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
        Q_ASSERT(bus);
        gst_bus_disable_sync_message_emission(bus);
        gst_object_unref(bus);

        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        /* Ensure the transition to NULL completes (blocks with no timeout) */
        gst_element_get_state(m_pipeline, 0, 0, GST_CLOCK_TIME_NONE);
        gst_object_unref(GST_OBJECT(m_pipeline));
    }

    /* Drop all element pointers and return to the pristine state */
    m_pipeline = m_videoLink = m_sink = m_audioLink = 0;
    m_audioDecoder = m_audioQueue = m_audioResample = m_volumeController = m_audioSink = 0;
    m_hasAudio = false;
    setVideoBuffer(0);
    m_state = Stopped;
    m_errorMessage.clear();
}
/* GstURIHandler::set_uri implementation: forwards the URI to the sink's
 * "location" property, refusing the change while the sink is streaming. */
static gboolean
gst_gnome_vfs_sink_uri_set_uri (GstURIHandler * handler, const gchar * uri)
{
  GstGnomeVFSSink *sink = GST_GNOME_VFS_SINK (handler);
  GstState state;

  /* non-blocking query (0 timeout) of the current state */
  gst_element_get_state (GST_ELEMENT (sink), &state, NULL, 0);
  if (state == GST_STATE_PAUSED || state == GST_STATE_PLAYING) {
    GST_WARNING_OBJECT (sink, "cannot set uri when PAUSED or PLAYING");
    return FALSE;
  }

  g_object_set (sink, "location", uri, NULL);

  return TRUE;
}
/* Return the pipeline's current GstState as a plain int, polling until any
 * in-flight (async) state change has settled. */
int music_get_state(struct music_rtp_pipeline *pipe)
{
	GstStateChangeReturn change;
	GstState state;

	for (;;) {
		/* 10 ms timeout per poll. */
		change = gst_element_get_state(pipe->pipeline, &state, NULL,
					       10000000);
		if (change != GST_STATE_CHANGE_ASYNC)
			break;
	}

	/* Translate away from GST types. */
	return (int) state;
}
/* Build an audiotestsrc ! queue ! audioconvert ! <factory_name> ! capsfilter
 * ! queue ! fakesink pipeline and repeatedly start/stop it, to check the
 * visualiser element under test shuts down cleanly while data is flowing. */
static void test_shutdown_for_factory (const gchar * factory_name)
{
  GstElement *pipeline, *src, *q, *ac, *vis, *cf, *q2, *sink;
  GstCaps *caps;
  guint i;

  pipeline = gst_pipeline_new (NULL);
  src = gst_check_setup_element ("audiotestsrc");
  q = gst_check_setup_element ("queue");
  ac = gst_check_setup_element ("audioconvert");
  GST_INFO ("Using %s", factory_name);
  vis = gst_check_setup_element (factory_name);
  cf = gst_check_setup_element ("capsfilter");
  /* pin the visualiser output to a small fixed video format */
  caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT, 320,
      "height", G_TYPE_INT, 240, "framerate", GST_TYPE_FRACTION, 15, 1, NULL);
  g_object_set (cf, "caps", caps, NULL);
  gst_caps_unref (caps);
  q2 = gst_check_setup_element ("queue");
  gst_object_set_name (GST_OBJECT (q2), "queue2");
  sink = gst_check_setup_element ("fakesink");

  /* don't want to sync against the clock, the more throughput the better */
  g_object_set (src, "is-live", FALSE, NULL);
  g_object_set (sink, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, q, ac, vis, cf, q2, sink, NULL);
  fail_if (!gst_element_link_many (src, q, ac, vis, cf, q2, sink, NULL));

  /* now, wait until pipeline is running and then shut it down again; repeat;
   * this makes sure we can shut down cleanly while stuff is going on in the
   * chain function */
  for (i = 0; i < 50; ++i) {
    gst_element_set_state (pipeline, GST_STATE_PAUSED);
    /* -1 == GST_CLOCK_TIME_NONE: block until PAUSED is reached */
    gst_element_get_state (pipeline, NULL, NULL, -1);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    g_usleep (100);
    gst_element_set_state (pipeline, GST_STATE_NULL);
  }

  gst_object_unref (pipeline);
}
/* Undo everything the videorate test setup created: drop collected buffers,
 * bring the element to NULL and tear down the harness pads. */
static void
cleanup_videorate (GstElement * videorate)
{
  GST_DEBUG ("cleanup_videorate");

  /* release every buffer the harness captured during the test */
  g_list_foreach (buffers, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (buffers);
  buffers = NULL;

  /* shut the element down, blocking until the transition is complete */
  gst_element_set_state (videorate, GST_STATE_NULL);
  gst_element_get_state (videorate, NULL, NULL, GST_CLOCK_TIME_NONE);

  gst_pad_set_active (mysrcpad, FALSE);
  gst_pad_set_active (mysinkpad, FALSE);
  gst_check_teardown_src_pad (videorate);
  gst_check_teardown_sink_pad (videorate);
  gst_check_teardown_element (videorate);
}
/* GObject set_property implementation for the curl TLS sink.
 *
 * TLS options must not change while the sink is streaming, so property
 * writes are ignored in PAUSED/PLAYING.
 *
 * Fixes vs. the previous version:
 *  - the old code executed GST_OBJECT_UNLOCK (sink) on the PAUSED/PLAYING
 *    path even though GST_OBJECT_LOCK was never taken there (unlocking an
 *    unheld mutex); the unlock now only happens after the matching lock.
 *  - the property switch gained the conventional default: warning for
 *    unknown property ids. */
static void gst_curl_tls_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstCurlTlsSink *sink;
  GstState cur_state;

  g_return_if_fail (GST_IS_CURL_TLS_SINK (object));
  sink = GST_CURL_TLS_SINK (object);

  /* non-blocking state query; refuse changes while streaming */
  gst_element_get_state (GST_ELEMENT (sink), &cur_state, NULL, 0);
  if (cur_state == GST_STATE_PLAYING || cur_state == GST_STATE_PAUSED)
    return;

  GST_OBJECT_LOCK (sink);

  switch (prop_id) {
    case PROP_CA_CERT:
      g_free (sink->ca_cert);
      sink->ca_cert = g_value_dup_string (value);
      /* setting an explicit certificate implies verification is wanted */
      sink->insecure = FALSE;
      GST_DEBUG_OBJECT (sink, "ca_cert set to %s", sink->ca_cert);
      break;
    case PROP_CA_PATH:
      g_free (sink->ca_path);
      sink->ca_path = g_value_dup_string (value);
      sink->insecure = FALSE;
      GST_DEBUG_OBJECT (sink, "ca_path set to %s", sink->ca_path);
      break;
    case PROP_CRYPTO_ENGINE:
      g_free (sink->crypto_engine);
      sink->crypto_engine = g_value_dup_string (value);
      GST_DEBUG_OBJECT (sink, "crypto_engine set to %s", sink->crypto_engine);
      break;
    case PROP_INSECURE:
      sink->insecure = g_value_get_boolean (value);
      GST_DEBUG_OBJECT (sink, "insecure set to %d", sink->insecure);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  GST_OBJECT_UNLOCK (sink);
}
bool ofGstUtils::startPipeline(){ bPaused = true; speed = 1.0f; // pause the pipeline if(gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) { ofLog(OF_LOG_ERROR, "GStreamer: unable to set pipeline to paused\n"); return false; } // wait for paused state to query the duration if(!isStream){ GstState state = GST_STATE_PAUSED; if(gst_element_get_state(gstPipeline,&state,NULL,2*GST_SECOND)==GST_STATE_CHANGE_FAILURE){ return false; } } bLoaded = true; if(isAppSink){ // set the appsink to not emit signals, we are using callbacks instead // and frameByFrame to get buffers by polling instead of callback g_object_set (G_OBJECT (gstSink), "emit-signals", FALSE, "sync", !bFrameByFrame, (void*)NULL); if(!bFrameByFrame){ GstAppSinkCallbacks gstCallbacks; gstCallbacks.eos = &on_eos_from_source; gstCallbacks.new_preroll = &on_new_preroll_from_source; gstCallbacks.new_buffer = &on_new_buffer_from_source; gst_app_sink_set_callbacks(GST_APP_SINK(gstSink), &gstCallbacks, this, NULL); } } setSpeed(1.0); ofAddListener(ofEvents.update,this,&ofGstUtils::update); return true; }
DFBVideoProviderStatus gstreamer_getStatus(void) { GstState cur_state, pending_state; gst_element_get_state (gst_provider.play, &cur_state, &pending_state, 0); dprintf("%s: status %d pending %d\n", __func__, cur_state, pending_state); if (pending_state != GST_STATE_VOID_PENDING) cur_state = pending_state; switch (cur_state) { case GST_STATE_READY: return DVSTATE_STOP; case GST_STATE_PAUSED: case GST_STATE_PLAYING: return DVSTATE_PLAY; default: break; } return DVSTATE_FINISHED; }
/* Render an element's current/pending state as a short label for the dot
 * dump, using one icon character per GstState ('~','0','-','=','>'). */
static gchar *
debug_dump_get_element_state (GstElement * element)
{
  const gchar *state_icons = "~0-=>";
  GstState state = GST_STATE_VOID_PENDING;
  GstState pending = GST_STATE_VOID_PENDING;

  gst_element_get_state (element, &state, &pending, 0);

  if (pending != GST_STATE_VOID_PENDING) {
    /* a transition is underway: show "current -> target" */
    return g_strdup_printf ("\\n[%c] -> [%c]", state_icons[state],
        state_icons[pending]);
  }

  /* stable state; flag elements whose state is locked */
  return g_strdup_printf ("\\n[%c]%s", state_icons[state],
      gst_element_is_locked_state (element) ? "(locked)" : "");
}
/* Drive the pipeline to `state`, waiting (forever) for asynchronous
 * transitions to finish. Returns TRUE on success, FALSE otherwise. */
gboolean
transition_pipeline (GstElement *pipeline, GstState state)
{
  GstStateChangeReturn sret;

  sret = gst_element_set_state (GST_ELEMENT (pipeline), state);

  if (sret == GST_STATE_CHANGE_ASYNC) {
    /* block until the async change has settled */
    sret = gst_element_get_state (GST_ELEMENT (pipeline), &state, NULL,
        GST_CLOCK_TIME_NONE);
    if (sret != GST_STATE_CHANGE_SUCCESS) {
      g_warning ("Asynchronous state change failed");
      return FALSE;
    }
    return TRUE;
  }

  if (sret != GST_STATE_CHANGE_SUCCESS) {
    g_warning ("State change failed.");
    return FALSE;
  }

  return TRUE;
}
/* Re-apply the playbin's video-flip setting and resume playback at the
 * position held before the change: READY -> set property -> PLAYING -> seek. */
void MusicPlayer::rotatefile(void)
{
    GstStateChangeReturn ret;
    /* NOTE(review): rotate is always 0 here, so only the "vflip-rotate"
     * branch ever runs -- confirm whether this was meant to be a parameter */
    int rotate = 0;
    GstFormat fmt = GST_FORMAT_TIME;
    gint64 posnow;

    /* remember where we are so we can seek back after the restart */
    if(gst_element_query_position(m_playbin,&fmt,&posnow))
    {
        ret = gst_element_set_state(m_playbin, GST_STATE_READY);
        if (ret == GST_STATE_CHANGE_FAILURE)
        {
            g_printerr ("Failed to set playbin to READY\n");
            return;
        }

        /* NOTE(review): this sets a property named "vflip-none" /
         * "vflip-rotate" with a NULL value -- verify these are the property
         * names and value type the element actually expects */
        if(rotate)
            g_object_set(G_OBJECT(m_playbin), "vflip-none", NULL, NULL);
        else
            g_object_set(G_OBJECT(m_playbin), "vflip-rotate", NULL, NULL);

        ret = gst_element_set_state(m_playbin, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE)
        {
            g_printerr ("Failed to set playbin to PLAYING\n");
            return;
        }

        /* wait up to 30 s for the transition to PLAYING to complete */
        ret = gst_element_get_state (m_playbin, NULL, NULL, 30 * GST_SECOND);
        if (ret == GST_STATE_CHANGE_ASYNC)
        {
            g_printerr ("Failed to go to PLAYING in 30 seconds, bailing out\n");
            return ;
        }
        else if (ret != GST_STATE_CHANGE_SUCCESS)
        {
            g_printerr ("State change to PLAYING failed\n");
            return ;
        }

        g_print("gst_element_seek\n");
        /* flushing seek back to the saved position */
        if (!gst_element_seek (m_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                GST_SEEK_TYPE_SET, posnow, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE))
            g_print ("Seek failed!\n");
    }
}
gboolean player_set_file (Player *player, const char *file, char **error) { GstElementState new_state; g_return_val_if_fail (IS_PLAYER (player), FALSE); *error = NULL; if (player->priv->eos_idle_id > 0) { g_source_remove (player->priv->eos_idle_id); player->priv->eos_idle_id = 0; } if (!file) { player_stop (player); return FALSE; } switch (gst_element_get_state (GST_ELEMENT (player->priv->play))) { case GST_STATE_PLAYING: new_state = GST_STATE_PLAYING; player_stop (player); break; default: new_state = GST_STATE_READY; break; } g_free (player->priv->current_file); player->priv->current_file = g_strdup (file); gst_play_set_location (player->priv->play, file); gst_element_set_state (GST_ELEMENT (player->priv->play), new_state); player->priv->pos = 0; return TRUE; }
/* Leave fullscreen: detach and dispose the branch (queue ! colorspace !
 * videoScale ! platformVideoSink) hanging off the video tee's request pad
 * (presumably added when fullscreen was entered -- named by m_dynamicPadName). */
void GStreamerGWorld::exitFullscreen()
{
    if (!m_dynamicPadName)
        return;

    // Get video sink bin and the elements to remove.
    GRefPtr<GstElement> videoSink;
    GstElement* sinkPtr = 0;
    g_object_get(m_pipeline, "video-sink", &sinkPtr, NULL);
    videoSink = adoptGRef(sinkPtr);

    GRefPtr<GstElement> tee = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoTee"));
    GRefPtr<GstElement> platformVideoSink = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "platformVideoSink"));
    GRefPtr<GstElement> queue = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "queue"));
    GRefPtr<GstElement> colorspace = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "colorspace"));
    GRefPtr<GstElement> videoScale = adoptGRef(gst_bin_get_by_name(GST_BIN(videoSink.get()), "videoScale"));

    // Get pads to unlink and remove.
    GRefPtr<GstPad> srcPad = adoptGRef(gst_element_get_static_pad(tee.get(), m_dynamicPadName.get()));
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));

    // Block data flow towards the pipeline branch to remove. No need
    // for pad blocking if the pipeline is paused.
    GstState state;
    gst_element_get_state(m_pipeline, &state, 0, 0);
    if (state < GST_STATE_PLAYING || gst_pad_set_blocked(srcPad.get(), true)) {

        // Unlink and release request pad.
        gst_pad_unlink(srcPad.get(), sinkPad.get());
        gst_element_release_request_pad(tee.get(), srcPad.get());

        // Unlink, remove and cleanup queue, ffmpegcolorspace, videoScale and sink.
        gst_element_unlink_many(queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
        gst_bin_remove_many(GST_BIN(videoSink.get()), queue.get(), colorspace.get(), videoScale.get(), platformVideoSink.get(), NULL);
        // Removed elements are brought to NULL before their refs drop.
        gst_element_set_state(platformVideoSink.get(), GST_STATE_NULL);
        gst_element_set_state(videoScale.get(), GST_STATE_NULL);
        gst_element_set_state(colorspace.get(), GST_STATE_NULL);
        gst_element_set_state(queue.get(), GST_STATE_NULL);
    }

    m_dynamicPadName.clear();
}
/* OCaml stub: query an element's state, blocking (GST_CLOCK_TIME_NONE) until
 * any pending state change finishes. Returns a 3-tuple of
 * (state-change-return, current state, pending state). */
CAMLprim value ocaml_gstreamer_element_get_state(value _e)
{
  CAMLparam1(_e);
  CAMLlocal1(ans);
  GstElement *e = Element_val(_e);
  GstStateChangeReturn ret;
  GstState state, pending;
  GstClockTime timeout = GST_CLOCK_TIME_NONE; /* TODO */

  /* the call may block indefinitely, so release the OCaml runtime lock
   * around it */
  caml_release_runtime_system();
  ret = gst_element_get_state(e, &state, &pending, timeout);
  caml_acquire_runtime_system();

  ans = caml_alloc_tuple(3);
  Store_field(ans, 0, value_of_state_change_return(ret));
  Store_field(ans, 1, val_of_state(state));
  Store_field(ans, 2, val_of_state(pending));

  CAMLreturn(ans);
}
/* GObject set_property for the xine input element. The location may only be
 * changed while the element is in NULL state; otherwise the write is
 * silently ignored. */
static void
gst_xine_input_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstXineInput *xine = GST_XINE_INPUT (object);

  switch (prop_id) {
    case ARG_LOCATION:
      if (gst_element_get_state (GST_ELEMENT (xine)) != GST_STATE_NULL)
        return;
      /* g_free handles a NULL location */
      g_free (xine->location);
      xine->location = g_strdup (g_value_get_string (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      return;
  }
}
/* Launch the _i-th synth pipeline description, play it to completion and
 * assert only whitelisted bus messages occur before EOS. */
static void
test_launch_synths (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  GstElement *pipeline = gst_parse_launch (synth_pipelines[_i], NULL);
  /* message types tolerated (skipped) while waiting for EOS */
  GstMessageType message_types =
      GST_MESSAGE_NEW_CLOCK | GST_MESSAGE_STATE_CHANGED |
      GST_MESSAGE_STREAM_STATUS | GST_MESSAGE_ASYNC_DONE |
      GST_MESSAGE_STREAM_START | GST_MESSAGE_TAG;
  GstMessageType tmessage = GST_MESSAGE_EOS;

  GST_INFO ("-- act --");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* block until the transition to PLAYING has fully completed */
  GstStateChangeReturn ret =
      gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  fail_unless (ret == GST_STATE_CHANGE_SUCCESS,
      "Couldn't set pipeline to PLAYING: %s",
      gst_element_state_change_return_get_name (ret));
  /* drain the bus until EOS; any non-whitelisted message fails the test */
  GstBus *bus = gst_element_get_bus (pipeline);
  while (1) {
    GstMessageType rmessage = get_message_type (bus);
    if (rmessage == tmessage) {
      break;
    } else if (rmessage == GST_MESSAGE_UNKNOWN) {
      fail ("Unexpected timeout in gst_bus_poll, looking for %d", tmessage);
      break;
    } else if (rmessage & message_types) {
      continue;
    }
    fail ("Unexpected message received of type %d, '%s', looking for %d",
        rmessage, gst_message_type_get_name (rmessage), tmessage);
  }

  GST_INFO ("-- cleanup --");
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  BT_TEST_END;
}
/* Build the GL effects pipeline -- videotestsrc when no file was given,
 * otherwise filesrc ! decodebin -- hook up the bus handlers and pre-roll the
 * pipeline by pausing it. */
void
Pipeline::configure ()
{
#ifdef Q_WS_WIN
  m_loop = g_main_loop_new (NULL, FALSE);
#endif

  if (m_videoLocation.isEmpty ()) {
    qDebug ("No video file specified. Using video test source.");
    m_pipeline =
        GST_PIPELINE (gst_parse_launch
        ("videotestsrc ! "
            "video/x-raw, width=640, height=480, "
            "framerate=(fraction)30/1 ! "
            "glupload ! gleffects effect=5 ! fakesink sync=1", NULL));
  } else {
    QByteArray ba = m_videoLocation.toLocal8Bit ();
    qDebug ("Loading video: %s", ba.data ());
    gchar *pipeline = g_strdup_printf ("filesrc name=f ! "
        "decodebin ! gleffects effect=5 ! "
        "fakesink sync=1");
    m_pipeline = GST_PIPELINE (gst_parse_launch (pipeline, NULL));
    /* point the named filesrc at the requested file */
    GstElement *f = gst_bin_get_by_name (GST_BIN (m_pipeline), "f");
    g_object_set (G_OBJECT (f), "location", ba.data (), NULL);
    gst_object_unref (GST_OBJECT (f));
    g_free (pipeline);
  }

  /* async watch for ordinary bus messages plus a sync handler
   * (needed for messages that must be handled in the streaming thread) */
  m_bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
  gst_bus_add_watch (m_bus, (GstBusFunc) bus_call, this);
  gst_bus_enable_sync_message_emission (m_bus);
  g_signal_connect (m_bus, "sync-message", G_CALLBACK (sync_bus_call), this);
  gst_object_unref (m_bus);

  /* pre-roll: block until the pipeline reaches PAUSED */
  gst_element_set_state (GST_ELEMENT (this->m_pipeline), GST_STATE_PAUSED);
  GstState state = GST_STATE_PAUSED;
  if (gst_element_get_state (GST_ELEMENT (this->m_pipeline), &state, NULL,
          GST_CLOCK_TIME_NONE) != GST_STATE_CHANGE_SUCCESS) {
    qDebug ("failed to pause pipeline");
    return;
  }
}
/* Bus callback for the play-twice test: on the first SEGMENT_DONE the
 * pipeline is cycled READY -> PAUSED, the segment seek is re-sent and
 * playback restarted; on the second SEGMENT_DONE the main loop quits. */
static void
test_play_twice_message_received (GstBus * bus, GstMessage * message,
    GstPipeline * bin)
{
  gboolean res;
  GstStateChangeReturn state_res;

  GST_INFO ("bus message from \"%" GST_PTR_FORMAT "\": %" GST_PTR_FORMAT,
      GST_MESSAGE_SRC (message), message);

  switch (message->type) {
    case GST_MESSAGE_SEGMENT_DONE:
      play_count++;
      if (play_count == 1) {
        state_res = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);

        /* prepare playing again */
        state_res = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PAUSED);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);

        /* wait for completion */
        state_res =
            gst_element_get_state (GST_ELEMENT (bin), NULL, NULL,
            GST_CLOCK_TIME_NONE);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);

        /* re-send the shared seek event; ref it so it survives this send */
        res = gst_element_send_event (GST_ELEMENT (bin),
            gst_event_ref (play_seek_event));
        fail_unless (res == TRUE, NULL);

        state_res =
            gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);
        ck_assert_int_ne (state_res, GST_STATE_CHANGE_FAILURE);
      } else {
        g_main_loop_quit (main_loop);
      }
      break;
    default:
      g_assert_not_reached ();
      break;
  }
}
/* Start the ogg test pipeline: initialise the sync state, install the EOS
 * buffer probe on `pad` and bring `bin` to PLAYING, waiting out any async
 * transition. */
static void
start_pipeline (GstElement * bin, GstPad * pad)
{
  GstStateChangeReturn sret;

  ogg_sync_init (&oggsync);

  /* per-chain EOS bookkeeping, keyed by pointer */
  eos_chain_states =
      g_hash_table_new_full (g_direct_hash, g_direct_equal, NULL, g_free);
  probe_id =
      gst_pad_add_buffer_probe (pad, G_CALLBACK (eos_buffer_probe), NULL);

  sret = gst_element_set_state (bin, GST_STATE_PLAYING);
  fail_if (sret == GST_STATE_CHANGE_FAILURE, "Could not start test pipeline");
  if (sret == GST_STATE_CHANGE_ASYNC) {
    /* block until the transition settles */
    sret = gst_element_get_state (bin, NULL, NULL, GST_CLOCK_TIME_NONE);
    fail_if (sret != GST_STATE_CHANGE_SUCCESS, "Could not start test pipeline");
  }
}
/* Stop the ogg test pipeline, remove the buffer probe and verify every
 * chain reached its expected final (EOS) state. */
static void
stop_pipeline (GstElement * bin, GstPad * pad)
{
  GstStateChangeReturn sret;

  sret = gst_element_set_state (bin, GST_STATE_NULL);
  fail_if (sret == GST_STATE_CHANGE_FAILURE, "Could not stop test pipeline");
  if (sret == GST_STATE_CHANGE_ASYNC) {
    /* block until the shutdown transition settles */
    sret = gst_element_get_state (bin, NULL, NULL, GST_CLOCK_TIME_NONE);
    fail_if (sret != GST_STATE_CHANGE_SUCCESS, "Could not stop test pipeline");
  }

  gst_pad_remove_buffer_probe (pad, (guint) probe_id);
  ogg_sync_clear (&oggsync);

  /* check end conditions, such as EOS flags */
  g_hash_table_foreach_remove (eos_chain_states,
      (GHRFunc) check_chain_final_state, NULL);
}
/* * Return the location in the stream. This is only relevant if the stream is * playing or paused. If the position of the track is a meaningless idea (i.e * the stream is null or something) then just return a -1. */ int64_t music_get_time_pos(struct music_rtp_pipeline *pipe){ gint64 pos = 0; gboolean ok; GstState cur_state = GST_STATE_NULL; GstFormat fmt = GST_FORMAT_TIME; gst_element_get_state(pipe->pipeline, &cur_state, NULL, 10000000); if ( cur_state != GST_STATE_PAUSED && cur_state != GST_STATE_PLAYING ) return -1; ok = gst_element_query_position(pipe->pipeline, &fmt, &pos); if ( ! ok ) return -1; /* Convert out of the gtype stuff. */ return (int64_t)pos; }
/*
 * Idle handler that winds `source` down one state per invocation
 * (PLAYING -> PAUSED -> READY -> NULL), holding the element's state locked
 * while it does so. Once NULL is reached, the reference is dropped and the
 * idle source detaches itself via g_idle_remove_by_data.
 *
 * Fix: the function is declared gboolean but previously had no return
 * statement -- undefined behaviour in C++, and GLib idle callbacks require a
 * return value. It now returns TRUE so GLib keeps invoking it until the
 * GST_STATE_NULL branch removes it explicitly.
 */
static gboolean stop_data(GstElement *source)
{
	if (gst_element_is_locked_state(source) == FALSE) {
		if (gst_element_set_locked_state(source, TRUE) == TRUE) {
			std::clog << "##### LOCK OK" << std::endl;
			std::clog << "##### LOCK NOW" << std::endl;

			GstState rtspstate;
			/* GST_CLOCK_TIME_NONE: blocks until any pending
			 * transition has finished */
			gst_element_get_state(source, &rtspstate, NULL,
					      GST_CLOCK_TIME_NONE);

			switch (rtspstate) {
			case GST_STATE_PLAYING:
				gst_element_set_state(source, GST_STATE_PAUSED);
				gst_element_set_locked_state(source, FALSE);
				break;
			case GST_STATE_PAUSED:
				gst_element_set_state(source, GST_STATE_READY);
				gst_element_set_locked_state(source, FALSE);
				break;
			case GST_STATE_READY:
				gst_element_set_state(source, GST_STATE_NULL);
				gst_element_set_locked_state(source, FALSE);
				break;
			case GST_STATE_NULL:
				/* fully stopped: drop our ref and detach
				 * this idle handler */
				gst_object_unref(GST_OBJECT(source));
				g_idle_remove_by_data(source);
				break;
			default:
				/* GST_STATE_VOID_PENDING: nothing to do */
				break;
			}
			std::clog << "########## State: "
			          << gst_element_state_get_name(rtspstate)
			          << std::endl;
			// gst_object_unref(GST_OBJECT(source));
		}
	}

	/* keep the idle source alive; the NULL branch removes it itself */
	return TRUE;
}
void ofGstUtils::close(){ if(bPlaying){ stop(); } if(bLoaded){ gst_element_set_state(GST_ELEMENT(gstPipeline), GST_STATE_NULL); gst_element_get_state(gstPipeline,NULL,NULL,2*GST_SECOND); // gst_object_unref(gstSink); this crashes, why?? ofEventArgs args; update(args); gst_object_unref(gstPipeline); gstPipeline = NULL; gstSink = NULL; } bLoaded = false; ofRemoveListener(ofEvents().update,this,&ofGstUtils::update); }
/* Map the pipeline's GStreamer state onto the KFILTER_STATE enumeration,
 * blocking until any in-flight state change completes. */
STDMETHODIMP_(KFILTER_STATE) IKGSTAudioPlayer::GetState()
{
	GstState state;

	gst_element_get_state(pipeline, &state, NULL, GST_CLOCK_TIME_NONE);

	switch (state) {
	case GST_STATE_NULL:
		return KState_Stopped;
	case GST_STATE_PLAYING:
		return KState_Running;
	case GST_STATE_READY:
	case GST_STATE_PAUSED:
		return KState_Paused;
	default:
		return KState_Unknown;
	}
}
/* Stop the downloader's source element: detach the bus sync handler, unlink
 * the source from the internal pad and bring it to NULL, waiting for the
 * transition to complete. */
static void
gst_uri_downloader_stop (GstUriDownloader * downloader)
{
  GstPad *peer;

  GST_DEBUG_OBJECT (downloader, "Stopping source element");

  /* remove the bus' sync handler */
  gst_bus_set_sync_handler (downloader->priv->bus, NULL, NULL);

  /* unlink the source element from the internal pad */
  peer = gst_pad_get_peer (downloader->priv->pad);
  if (peer != NULL) {
    gst_pad_unlink (peer, downloader->priv->pad);
    gst_object_unref (peer);
  }

  /* shut the source down, blocking until it reaches NULL */
  gst_element_set_state (downloader->priv->urisrc, GST_STATE_NULL);
  gst_element_get_state (downloader->priv->urisrc, NULL, NULL,
      GST_CLOCK_TIME_NONE);
}
/* Attach a new video source to the compositor. Builds a
 * queue [! videoscale] ! capsfilter branch into the videomixer, ghosts the
 * queue's sink pad on the bin and links the caller's srcPad to it. If `rect`
 * is valid, the branch is scaled to rect's size and positioned at rect's
 * origin on the mixer pad. */
void VideoCompositor::addSource(GstPad *srcPad, QRect rect)
{
    GstPad *sinkPad, *targetPad;
    // pads are named sink_0, sink_1, ... in creation order
    QByteArray padName = QString("sink_%1").arg(numSink++).toLatin1();
    GstElement *queue = gst_element_factory_make("queue", NULL);
    targetPad = gst_element_get_static_pad(queue, "sink");
    GstElement *filter = gst_element_factory_make("capsfilter", NULL);
    // constrain to I420; add fixed dimensions when a target rect is given
    QByteArray capsString = QByteArray("video/x-raw,format=I420");
    if(rect.isValid())capsString.append(QString(",width=(int)%1,height=(int)%2").arg(rect.width()).arg(rect.height()).toLatin1());
    GstCaps *caps = gst_caps_from_string(capsString.constData());
    g_object_set(filter, "caps", caps, NULL);
    gst_bin_add_many(GST_BIN(bin), queue, filter, NULL);
    gst_caps_unref(caps);
    if(rect.isValid())
    {
        // scale the source into the requested rectangle
        GstElement *videoscale = gst_element_factory_make("videoscale", NULL);
        gst_bin_add(GST_BIN(bin), videoscale);
        gst_element_link_many(queue, videoscale, filter, videomixer, NULL);
        // position the branch on the mixer pad created by the link above
        GstPad *mixPad = gst_element_get_static_pad(videomixer, padName.constData());
        g_object_set(mixPad, "xpos", rect.x(), "ypos", rect.y(), NULL);
        gst_object_unref(mixPad);
    }
    else
    {
        gst_element_link_many(queue, filter, videomixer, NULL);
    }
    GstState state;
    // wait up to one second for the bin's current state to settle
    gst_element_get_state(bin, &state, NULL, GST_SECOND);
    GstPad *ghost = gst_ghost_pad_new(padName.constData(), targetPad);
    // pads added to an already-running bin must be activated by hand
    if(state==GST_STATE_PLAYING)gst_pad_set_active(ghost, TRUE);
    gst_element_add_pad(bin, ghost);
    sinkPad = gst_element_get_static_pad(bin, padName.constData());
    gst_pad_link(srcPad, sinkPad);
    gst_object_unref(sinkPad);
    gst_object_unref(targetPad);
    // NOTE(review): this drops the caller's reference to srcPad -- assumes
    // ownership of the pad is transferred to this method; confirm callers
    gst_object_unref(srcPad);
}
/*
 * Open a URI for playback.
 *
 * Builds the playbin on demand, lets the CDDA and DVD backends intercept the
 * URI first (e.g. for a fast in-disc track seek), drops an already-streaming
 * playbin back to READY, then hands the URI to playbin. When video is
 * possible, a matching subtitle file is looked up as well.
 */
P_INVOKE gboolean
bp_open (BansheePlayer *player, const gchar *uri, gboolean maybe_video)
{
    GstState state;

    g_return_val_if_fail (IS_BANSHEE_PLAYER (player), FALSE);

    // Build the pipeline if we need to
    if (player->playbin == NULL && !_bp_pipeline_construct (player)) {
        return FALSE;
    }

    // Give the CDDA code a chance to intercept the open request
    // in case it is able to perform a fast seek to a track
    if (_bp_cdda_handle_uri (player, uri)) {
        return TRUE;
    }
    if (_bp_dvd_handle_uri (player, uri)) {
        return TRUE;
    }
    if (player->playbin == NULL) {
        return FALSE;
    }

    // Set the pipeline to the proper state
    gst_element_get_state (player->playbin, &state, NULL, 0);
    if (state >= GST_STATE_PAUSED) {
        player->target_state = GST_STATE_READY;
        gst_element_set_state (player->playbin, GST_STATE_READY);
    }

    // Pass the request off to playbin
    g_object_set (G_OBJECT (player->playbin), "uri", uri, NULL);

    if (maybe_video) {
        // Lookup for subtitle files with same name/folder
        bp_lookup_for_subtitle (player, uri);
    }

    player->in_gapless_transition = FALSE;

    return TRUE;
}
/* Run a main loop until the given bus message `msg` (e.g. "message::eos") or
 * an error message arrives on the song's pipeline bus.
 * Returns TRUE when the loop ran and no error was received. */
gboolean
check_run_main_loop_until_msg_or_error (BtSong * song, const gchar * msg)
{
  GstStateChangeReturn sret;
  GstState state, pending;
  GMainLoop *main_loop = g_main_loop_new (NULL, FALSE);
  GstElement *bin =
      (GstElement *) check_gobject_get_object_property (song, "bin");
  GstBus *bus = gst_element_get_bus (bin);

  /* quit the loop on either an error or the requested message */
  gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH);
  g_signal_connect (bus, "message::error", G_CALLBACK (_check_message_received),
      (gpointer) main_loop);
  g_signal_connect (bus, msg, G_CALLBACK (_check_message_received),
      (gpointer) main_loop);
  _check_run_main_loop_error = FALSE;

  /* non-blocking state query (0 timeout) */
  sret = gst_element_get_state (bin, &state, &pending, G_GUINT64_CONSTANT (0));
  // be careful to not run this when the song already finished
  if (sret != GST_STATE_CHANGE_FAILURE) {
    GST_INFO_OBJECT (song, "running main_loop: sret=%s, state=%s/%s",
        gst_element_state_change_return_get_name (sret),
        gst_element_state_get_name (state),
        gst_element_state_get_name (pending));
    g_main_loop_run (main_loop);
  } else {
    GST_INFO_OBJECT (song, "skipping main_loop: sret=%s, state=%s/%s",
        gst_element_state_change_return_get_name (sret),
        gst_element_state_get_name (state),
        gst_element_state_get_name (pending));
  }

  /* detach and release everything we attached above */
  gst_bus_remove_signal_watch (bus);
  g_signal_handlers_disconnect_matched (bus, G_SIGNAL_MATCH_DATA, 0, 0, NULL,
      NULL, (gpointer) main_loop);
  gst_object_unref (bus);
  gst_object_unref (bin);
  g_main_loop_unref (main_loop);
  GST_INFO_OBJECT (song, "finished main_loop");

  return sret == GST_STATE_CHANGE_FAILURE ? FALSE : !_check_run_main_loop_error;
}
void ofGstUtils::setPaused(bool _bPause){ bPaused = _bPause; //timeLastIdle = ofGetElapsedTimeMillis(); if(bLoaded){ if(bPlaying){ if(bPaused){ gst_element_set_state (gstPipeline, GST_STATE_PAUSED); }else{ gst_element_set_state (gstPipeline, GST_STATE_PLAYING); } }else{ GstState state = GST_STATE_PAUSED; gst_element_set_state (gstPipeline, state); gst_element_get_state(gstPipeline,&state,NULL,2*GST_SECOND); if(!bPaused){ gst_element_set_state (gstPipeline, GST_STATE_PLAYING); } bPlaying = true; } } }
/* Stop playback by dropping the playbin to READY (when it is above READY),
 * flushing the bus first so no stale messages get delivered. Returns 0. */
int gstreamer_stop(void)
{
	GstBus *bus;
	GstState state;

	if (gst_provider.play == NULL)
		return 0;

	dprintf("%s: in\n", __func__);

	/* discard queued bus messages before tearing playback down */
	bus = gst_element_get_bus (gst_provider.play);
	gst_bus_set_flushing (bus, TRUE);

	gst_element_get_state (gst_provider.play, &state, NULL, 0);
	if (state > GST_STATE_READY) {
		dprintf ("%s: stopping %s\n", __func__, gst_provider.uri);
		gst_element_set_state (gst_provider.play, GST_STATE_READY);
	}

	gst_object_unref (bus);

	dprintf("%s: out\n", __func__);
	return 0;
}