/* Bus handler for STREAM_STATUS messages: when GStreamer creates a new
 * streaming task, replace its default task pool with the custom realtime
 * pool so the task's thread is scheduled by test_rt_pool_new()'s pool.
 *
 * bus/user_data are unused; the message carries the task object.
 */
static void
on_stream_status (GstBus *bus, GstMessage *message, gpointer user_data)
{
  GstStreamStatusType type;
  GstElement *owner;
  const GValue *val;
  GstTask *task = NULL;

  gst_message_parse_stream_status (message, &type, &owner);
  val = gst_message_get_stream_status_object (message);

  /* see if we know how to deal with this object */
  if (G_VALUE_TYPE (val) == GST_TYPE_TASK) {
    task = g_value_get_object (val);
  }

  switch (type) {
    case GST_STREAM_STATUS_TYPE_CREATE:
      if (task) {
        GstTaskPool *pool;

        pool = test_rt_pool_new ();
        gst_task_set_pool (task, pool);
        /* gst_task_set_pool() takes its own reference on the pool;
         * drop ours so the pool is not leaked once the task is done. */
        gst_object_unref (pool);
      }
      break;
    default:
      break;
  }
}
/*!
 * \brief handleMessage
 * Handles gstreamer bus messages. Mainly for debugging purposes and ensuring clean shutdown on error.
 *
 * Drains every message already queued on the pipeline's bus (non-blocking),
 * reacts to a few message types, and releases both each message and the bus
 * reference before returning.
 */
void handleMessage(GstElement * pipeline)
{
    GError *err = NULL;
    gchar *debug = NULL;
    GstBus* bus = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;
    GstMessage* msg = NULL;

    // We own the bus reference returned here; released at the bottom.
    bus = gst_element_get_bus(pipeline);

    // Only pop messages that are already pending — never blocks.
    while(gst_bus_have_pending(bus))
    {
        msg = gst_bus_pop(bus);
        //printf("Got %s message\n", GST_MESSAGE_TYPE_NAME(msg));

        if(gst_is_missing_plugin_message(msg))
        {
            //ERROR(1, "GStreamer: your gstreamer installation is missing a required plugin\n");
            fprintf(stderr, "GStreamer: your gstreamer installation is missing a required plugin\n");
        }
        else
        {
            switch (GST_MESSAGE_TYPE (msg))
            {
            case GST_MESSAGE_STATE_CHANGED:
                // Parsed only so the debug prints below can be re-enabled.
                GstState oldstate, newstate, pendstate;
                gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                //fprintf(stderr, "state changed from %s to %s (pending: %s)\n", gst_element_state_get_name(oldstate),
                //        gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
                break;
            case GST_MESSAGE_ERROR:
                // Parse (and immediately free) the error details, then drive the
                // pipeline to NULL so it shuts down cleanly on error.
                gst_message_parse_error(msg, &err, &debug);
                //fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                //        gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
                g_error_free(err);
                g_free(debug);
                gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
                break;
            case GST_MESSAGE_EOS:
                //fprintf(stderr, "reached the end of the stream.");
                break;
            case GST_MESSAGE_STREAM_STATUS:
                // Parsed only for the (disabled) debug print; owner is not reffed.
                gst_message_parse_stream_status(msg,&tp,&elem);
                //fprintf(stderr, "stream status: elem %s, %i\n", GST_ELEMENT_NAME(elem), tp);
                break;
            default:
                //fprintf(stderr, "unhandled message\n");
                break;
            }
        }
        // Each popped message is owned by us and must be unreffed.
        gst_message_unref(msg);
    }
    gst_object_unref(GST_OBJECT(bus));
}
/* GTK bus watch: embeds video output into the window identified by the
 * global g_video_xid (via the prepare-window-handle protocol) and quits the
 * GTK main loop on EOS or error. Always returns TRUE to stay installed. */
static gboolean
bus_call (GstBus *bus,GstMessage *msg, gpointer data)
{
  /* Video overlay handling: answer the sink's prepare-window-handle request
   * by pointing it at our X window, if we already have one. */
  if (gst_is_video_overlay_prepare_window_handle_message (msg))
  {
    if (0 != g_video_xid)
    {
      GstVideoOverlay *overlay;
      // GST_MESSAGE_SRC (message) will be the video sink element
      overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (msg));
      gst_video_overlay_set_window_handle (overlay, g_video_xid);
    }
    else
    {
      g_warning ("Should have obtained video_window_handle by now!");
    }
  }

  switch (GST_MESSAGE_TYPE (msg))
  {
    case GST_MESSAGE_EOS:
      g_print ("####################### Stream Ends\n");
      gtk_main_quit();
      break;
    case GST_MESSAGE_ERROR:
    {
      gchar *debug;
      GError *error;
      /* Free the debug string before printing; only the message is shown. */
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("####################### Error: %s\n", error->message);
      g_error_free (error);
      gtk_main_quit();
      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
    {
      GstState oldState;
      GstState newState;
      GstState pendingState;
      gst_message_parse_state_changed(msg, &oldState, &newState, &pendingState);
      g_printf("####################### oldState:%d, newState:%d, pendingState:%d!\n", oldState, newState, pendingState);
      break;
    }
    case GST_MESSAGE_STREAM_STATUS:
    {
      GstStreamStatusType statusType;
      GstElement* owner = NULL;
      /* Logged for debugging only; owner is borrowed, not reffed. */
      gst_message_parse_stream_status(msg, &statusType, &owner);
      g_printf("####################### statusType:%d, owner:%p!\n", statusType, owner);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
// Drains all pending messages from the member `bus` (non-blocking) and sets
// the member flag `end` when playback should stop: on a missing plugin, on
// an error, or at end-of-stream. Each popped message is unreffed here.
void GStreamerBaseFrameSourceImpl::handleGStreamerMessages()
{
    GstMessage* msg  = NULL;
    GError *err = NULL;
    gchar *debug = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;

    // No bus yet — nothing to drain.
    if (!bus)
        return;

    while (gst_bus_have_pending(bus))
    {
        msg = gst_bus_pop(bus);
        if (gst_is_missing_plugin_message(msg))
        {
            printf("GStreamer: your gstreamer installation is missing a required plugin!\n");
            end = true;
        }
        else
        {
            switch (GST_MESSAGE_TYPE(msg))
            {
                case GST_MESSAGE_STATE_CHANGED:
                    // Parsed but intentionally not logged.
                    GstState oldstate, newstate, pendstate;
                    gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                    break;
                case GST_MESSAGE_ERROR:
                {
                    gst_message_parse_error(msg, &err, &debug);
                    // GlibDeleter frees the g_malloc'd element name automatically.
                    std::unique_ptr<char[], GlibDeleter> name(gst_element_get_name(GST_MESSAGE_SRC (msg)));
                    printf("GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                           name.get(), err->message);
                    g_error_free(err);
                    g_free(debug);
                    end = true;
                    break;
                }
                case GST_MESSAGE_EOS:
                    end = true;
                    break;
                case GST_MESSAGE_STREAM_STATUS:
                    // Parsed only to consume the message; values are unused.
                    gst_message_parse_stream_status(msg,&tp,&elem);
                    break;
                default:
                    break;
            }
        }
        gst_message_unref(msg);
    }
}
/* Synchronous bus handler (runs in the posting streaming thread, not the
 * main loop): logs detailed information about STREAM_STATUS messages —
 * status type, message source path, owning element path, and the task
 * object, if any. All messages are passed on to the async queue. */
static GstBusSyncReply
sync_bus_handler (GstBus * bus, GstMessage * message, GstElement * bin)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_STREAM_STATUS:
    {
      GstStreamStatusType type;
      GstElement *owner;
      const GValue *val;
      gchar *path;
      GstTask *task = NULL;

      g_message ("received STREAM_STATUS");
      gst_message_parse_stream_status (message, &type, &owner);
      val = gst_message_get_stream_status_object (message);
      g_message ("type:   %d", type);
      /* Path strings are newly allocated and must be freed by us. */
      path = gst_object_get_path_string (GST_MESSAGE_SRC (message));
      g_message ("source: %s", path);
      g_free (path);
      path = gst_object_get_path_string (GST_OBJECT (owner));
      g_message ("owner:  %s", path);
      g_free (path);
      g_message ("object: type %s, value %p", G_VALUE_TYPE_NAME (val),
          g_value_get_object (val));

      /* see if we know how to deal with this object */
      if (G_VALUE_TYPE (val) == GST_TYPE_TASK) {
        task = g_value_get_object (val);
      }

      switch (type) {
        case GST_STREAM_STATUS_TYPE_CREATE:
          g_message ("created task %p", task);
          break;
        case GST_STREAM_STATUS_TYPE_ENTER:
          /* This is the place where a thread-priority boost could be done,
           * since we are running in the streaming thread itself here. */
          /* g_message ("raising task priority"); */
          /* setpriority (PRIO_PROCESS, 0, -10); */
          break;
        case GST_STREAM_STATUS_TYPE_LEAVE:
          break;
        default:
          break;
      }
      break;
    }
    default:
      break;
  }
  /* pass all messages on the async queue */
  return GST_BUS_PASS;
}
// Installs TaskEnterCallback on each streaming task the pipeline creates,
// so this engine is notified when the task's thread starts running.
void GstEnginePipeline::StreamStatusMessageReceived(GstMessage* msg) {
  GstStreamStatusType status_type;
  GstElement* status_owner;
  gst_message_parse_stream_status(msg, &status_type, &status_owner);

  // Only freshly created tasks are of interest.
  if (status_type != GST_STREAM_STATUS_TYPE_CREATE) return;

  const GValue* status_object = gst_message_get_stream_status_object(msg);
  if (G_VALUE_TYPE(status_object) != GST_TYPE_TASK) return;

  GstTask* created_task = static_cast<GstTask*>(g_value_get_object(status_object));
  gst_task_set_enter_callback(created_task, &TaskEnterCallback, this, NULL);
}
// Bus watch for the gPlay player: forwards state changes of the "playbin"
// element, parsed tags, and EOS to the gPlay instance passed via `data`.
// Returns true so the watch stays installed.
gboolean bus_callback(GstBus* sender, GstMessage* message, void* data)
{
    gPlay* gplay = reinterpret_cast<gPlay*> (data);
    switch (GST_MESSAGE_TYPE (message))
    {
        case GST_MESSAGE_STATE_CHANGED:
        {
            GstState newState;
            // Only the new state matters; old/pending are discarded.
            gst_message_parse_state_changed(message, NULL, &newState, NULL);
            std::string message_name(GST_MESSAGE_SRC_NAME(message));//TODO: Avoid this copy using glib
            // Only react to state changes of the top-level playbin element.
            if (message_name.compare("playbin") == 0){
                gplay->on_state_changed(newState);
            }
        }
        break;
        case GST_MESSAGE_TAG:
        {
            // parse_tag hands us a new tag list we must free afterwards.
            GstTagList* tag_list = 0;
            gst_message_parse_tag(message, &tag_list);
            Track t;
            track_from_tag(tag_list, &t);
            gplay->on_tag_found(t);
            gst_tag_list_free(tag_list);
        }
        break;
        case GST_MESSAGE_EOS:
            gplay->on_eos();
            break;
        case GST_MESSAGE_STREAM_STATUS:
            GstStreamStatusType message_type;
            gst_message_parse_stream_status(message, &message_type, NULL);
            g_print("Stream status: %d\n", message_type);
            break;
        default:
            g_print("Message from %s: %s\n", GST_MESSAGE_SRC_NAME(message),
                    gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
            break;
    }
    //TODO: Should I dispose message?
    // NOTE(review): per GStreamer's bus-watch contract the callback does NOT
    // own `message`, so no unref is needed here — confirm against the docs
    // for the watch-installation call used elsewhere in this file.
    return true;
}
/* Main-loop bus watch: quits the GMainLoop handed in through `data` on EOS
 * or error, and logs state-changed / stream-status messages. Returning TRUE
 * keeps the watch installed. */
static gboolean bus_call (GstBus *bus,GstMessage *msg, gpointer data){
  GMainLoop *main_loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS: {
      g_print ("Stream Ends\n");
      g_main_loop_quit (main_loop);
      break;
    }
    case GST_MESSAGE_ERROR: {
      GError *gerror;
      gchar *debug_info;

      /* The debug string is released unread; only the message is printed. */
      gst_message_parse_error (msg, &gerror, &debug_info);
      g_free (debug_info);
      g_printerr ("Error: %s\n", gerror->message);
      g_error_free (gerror);
      g_main_loop_quit (main_loop);
      break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
      GstState from_state;
      GstState to_state;
      GstState queued_state;

      gst_message_parse_state_changed(msg, &from_state, &to_state, &queued_state);
      g_printf("oldState:%d, newState:%d, pendingState:%d!\n", from_state, to_state, queued_state);
      break;
    }
    case GST_MESSAGE_STREAM_STATUS: {
      GstStreamStatusType status;
      GstElement* status_owner = NULL;

      /* Debug log only; the owner element is borrowed, not reffed. */
      gst_message_parse_stream_status(msg, &status, &status_owner);
      g_printf("statusType:%d, owner:%p!\n", status, status_owner);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
// Registers TaskEnterCallback as the enter-thread hook of any task the
// pipeline spawns, using the GstTaskThreadCallbacks-based API.
void GstEnginePipeline::StreamStatusMessageReceived(GstMessage* msg) {
  GstStreamStatusType status_type;
  GstElement* status_owner;
  gst_message_parse_stream_status(msg, &status_type, &status_owner);

  // Only newly created tasks need the hook installed.
  if (status_type != GST_STREAM_STATUS_TYPE_CREATE) return;

  const GValue* status_object = gst_message_get_stream_status_object(msg);
  if (G_VALUE_TYPE(status_object) != GST_TYPE_TASK) return;

  GstTask* created_task = static_cast<GstTask*>(g_value_get_object(status_object));

  // Zero-initialize all hooks, then fill in only the one we care about.
  GstTaskThreadCallbacks hooks = {};
  hooks.enter_thread = TaskEnterCallback;
  gst_task_set_thread_callbacks(created_task, &hooks, this, nullptr);
}
/* GstBin::handle_message override for the RTP conference: intercepts a few
 * messages posted by children before forwarding everything else to the
 * parent class.
 *  - ELEMENT "application/x-rtp-source-sdes": associates an SSRC with its
 *    CNAME on the matching session.
 *  - ELEMENT "dtmf-event-processed"/"dtmf-event-dropped": offers the message
 *    to each session; a session that consumes it swallows the message.
 *  - STREAM_STATUS ENTER/LEAVE: tracks which threads are streaming for us
 *    in self->priv->threads.
 * A message set to NULL below is consumed and NOT forwarded. */
static void
fs_rtp_conference_handle_message (
    GstBin * bin,
    GstMessage * message)
{
  FsRtpConference *self = FS_RTP_CONFERENCE (bin);

  /* Without an rtpbin there is nothing to intercept; just forward. */
  if (!self->rtpbin)
    goto out;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ELEMENT:
    {
      const GstStructure *s = gst_message_get_structure (message);

      /* we change the structure name and add the session ID to it */
      if (gst_structure_has_name (s, "application/x-rtp-source-sdes") &&
          gst_structure_has_field_typed (s, "session", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT) &&
          gst_structure_has_field_typed (s, "cname", G_TYPE_STRING))
      {
        guint session_id;
        guint ssrc;
        const GValue *val;
        FsRtpSession *session;
        const gchar *cname;

        val = gst_structure_get_value (s, "session");
        session_id = g_value_get_uint (val);

        val = gst_structure_get_value (s, "ssrc");
        ssrc = g_value_get_uint (val);

        cname = gst_structure_get_string (s, "cname");

        if (!ssrc || !cname)
        {
          GST_WARNING_OBJECT (self,
              "Got GstRTPBinSDES without a ssrc or a cname (ssrc:%u cname:%p)",
              ssrc, cname);
          break;
        }

        session = fs_rtp_conference_get_session_by_id (self, session_id);

        if (session) {
          fs_rtp_session_associate_ssrc_cname (session, ssrc, cname);
          /* get_session_by_id returned a new ref we must drop. */
          g_object_unref (session);
        } else {
          GST_WARNING_OBJECT (self,"Our RtpBin announced a new association"
              "for non-existent session %u for ssrc: %u and cname %s",
              session_id, ssrc, cname);
        }
      }
      else if (gst_structure_has_name (s, "dtmf-event-processed") ||
          gst_structure_has_name (s, "dtmf-event-dropped"))
      {
        GList *item;
        guint cookie;

        /* Iterate the session list with the lock dropped around each
         * callback; the cookie detects concurrent list modification, in
         * which case iteration restarts from the head. */
        GST_OBJECT_LOCK (self);
      restart:
        cookie = self->priv->sessions_cookie;
        for (item = self->priv->sessions; item; item = item->next)
        {
          GST_OBJECT_UNLOCK (self);
          if (fs_rtp_session_handle_dtmf_event_message (item->data, message))
          {
            /* The session consumed the message: drop it and skip the
             * parent forwarding at the bottom. */
            gst_message_unref (message);
            message = NULL;
            goto out;
          }
          GST_OBJECT_LOCK (self);
          if (cookie != self->priv->sessions_cookie)
            goto restart;
        }
        GST_OBJECT_UNLOCK (self);
      }
    }
      break;
    case GST_MESSAGE_STREAM_STATUS:
    {
      GstStreamStatusType type;
      guint i;

      gst_message_parse_stream_status (message, &type, NULL);

      switch (type) {
        case GST_STREAM_STATUS_TYPE_ENTER:
          /* Record the current thread as a streaming thread, unless it is
           * already tracked. */
          GST_OBJECT_LOCK (self);
          for (i = 0; i < self->priv->threads->len; i++)
          {
            if (g_ptr_array_index (self->priv->threads, i) ==
                g_thread_self ())
              goto done;
          }
          g_ptr_array_add (self->priv->threads, g_thread_self ());
        done:
          GST_OBJECT_UNLOCK (self);
          break;
        case GST_STREAM_STATUS_TYPE_LEAVE:
          /* Remove every entry for the current thread. */
          GST_OBJECT_LOCK (self);
          while (g_ptr_array_remove_fast (self->priv->threads,
                  g_thread_self ()));
          GST_OBJECT_UNLOCK (self);
          break;
        default:
          /* Do nothing */
          break;
      }
    }
      break;
    default:
      break;
  }

 out:
  /* forward all messages to the parent */
  if (message)
    GST_BIN_CLASS (fs_rtp_conference_parent_class)->handle_message (bin,
        message);
}
/* Bus watch for one decode pipeline (`data` is the owning input_handle):
 * marks the handle ready on async-done / EOS / error, and quits the
 * handle's main loop exactly once on EOS or error. The main_loop_quit flag
 * is protected by the global gstreamer_mutex so concurrent callbacks do not
 * quit the loop twice. Always returns TRUE to keep the watch. */
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
  struct input_handle *ih = (struct input_handle *) data;

  /* if (verbose) fprintf(stderr, "%p %s %s\n", bus, GST_MESSAGE_TYPE_NAME(msg), ih->filename); */
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ASYNC_DONE:{
      /* Preroll finished: pipeline is usable, no need to tear it down. */
      ih->quit_pipeline = FALSE;
      ih->ready = TRUE;
      break;
    }
    case GST_MESSAGE_EOS:{
      if (verbose) g_print("End-of-stream\n");
      ih->ready = TRUE;
      /* Quit the loop only once, guarded by the shared mutex. */
      g_static_mutex_lock(&gstreamer_mutex);
      if (!ih->main_loop_quit) {
        ih->main_loop_quit = TRUE;
        g_main_loop_quit(ih->loop);
      }
      g_static_mutex_unlock(&gstreamer_mutex);
      break;
    }
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *err;

      /* Debug string is freed unread; the error message may be logged. */
      gst_message_parse_error (msg, &err, &debug);
      g_free (debug);
      if (verbose) g_print("%p Error: %s\n", bus, err->message);
      g_error_free (err);
      ih->ready = TRUE;
      /* Same once-only quit protocol as for EOS. */
      g_static_mutex_lock(&gstreamer_mutex);
      if (!ih->main_loop_quit) {
        ih->main_loop_quit = TRUE;
        g_main_loop_quit(ih->loop);
      }
      g_static_mutex_unlock(&gstreamer_mutex);
      break;
    }
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state;

      /* Parsed only so the commented log below can be re-enabled. */
      gst_message_parse_state_changed (msg, &old_state, &new_state, NULL);
      /* if (verbose) g_print ("Element %s changed state from %s to %s.\n",
             GST_OBJECT_NAME (msg->src),
             gst_element_state_get_name (old_state),
             gst_element_state_get_name (new_state)); */
      break;
    }
    case GST_MESSAGE_STREAM_STATUS:{
      GstStreamStatusType type;
      GstElement *owner;

      /* Parsed only for the (disabled) debug print. */
      gst_message_parse_stream_status(msg, &type, &owner);
      /* if (verbose) g_print("%p New Stream Type: %d\n", bus, type); */
      break;
    }
    default:
      break;
  }
  return TRUE;
}
bool GstPipelineWrapper::GstMessageParser(GstBus* bus, GstMessage* msg, GstPipelineWrapper* pipeline) { if (!pipeline->get_is_verbose()) return true; if (msg != NULL) { GError* err = 0; gchar* debug_info = 0; switch (GST_MESSAGE_TYPE (msg)) { case GST_MESSAGE_ERROR: gst_message_parse_error (msg, &err, &debug_info); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_clear_error (&err); g_free (debug_info); break; case GST_MESSAGE_WARNING: gst_message_parse_warning(msg, &err, &debug_info); g_printerr ("Warning received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_clear_error (&err); g_free (debug_info); break; case GST_MESSAGE_INFO: gst_message_parse_info(msg, &err, &debug_info); g_printerr ("Info received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Debugging information: %s\n", debug_info ? 
debug_info : "none"); g_clear_error (&err); g_free (debug_info); break; case GST_MESSAGE_EOS: g_print ("End-Of-Stream reached.\n"); break; case GST_MESSAGE_STATE_CHANGED: GstState old_state, new_state; gst_message_parse_state_changed(msg, &old_state, &new_state, 0); g_print ("Element %s changed state from %s to %s.\n", GST_OBJECT_NAME (msg->src), gst_element_state_get_name (old_state),gst_element_state_get_name (new_state)); break; case GST_MESSAGE_QOS: break; case GST_MESSAGE_STREAM_STATUS: GstStreamStatusType stream_status_type; GstElement* owner; const gchar* stream_status_type_string; gst_message_parse_stream_status(msg, &stream_status_type, &owner); switch (stream_status_type) { case GST_STREAM_STATUS_TYPE_CREATE : stream_status_type_string = "CREATE"; break; case GST_STREAM_STATUS_TYPE_ENTER : stream_status_type_string = "ENTER"; break; case GST_STREAM_STATUS_TYPE_LEAVE : stream_status_type_string = "LEAVE"; break; case GST_STREAM_STATUS_TYPE_DESTROY : stream_status_type_string = "DESTROY"; break; case GST_STREAM_STATUS_TYPE_START : stream_status_type_string = "START"; break; case GST_STREAM_STATUS_TYPE_PAUSE : stream_status_type_string = "PAUSE"; break; case GST_STREAM_STATUS_TYPE_STOP : stream_status_type_string = "STOP"; break; } g_printerr ("STREAM STATUS received from element %s: %s\n", GST_OBJECT_NAME (owner), stream_status_type_string); //g_free (stream_status_type_string); break; default: g_printerr ("Unparsed message received of type: %s\n", gst_message_type_get_name(GST_MESSAGE_TYPE(msg))); break; } } return true; }
/* Bus watch for a looping player: on EOS the stream is seeked back to the
 * start so playback repeats; when the first streaming thread enters, a
 * one-time seek jumps to the position given by the global `search` (in
 * seconds). Relies on the globals `loop` (GMainLoop), `cpt` (presumably a
 * counter ensuring the initial seek happens only once — verify against the
 * rest of the file) and `search`. */
static gboolean
bus_call(GstBus *bus, GstMessage *message, gpointer data) /* reads messages posted on the pipeline */
{
  GstElement *play = GST_ELEMENT(data);;
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR: /* error message: close the window / quit the loop */
      g_print("fenetre fermé\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_EOS: /* end of playback */
      if (!gst_element_seek(play, /* seek back to the beginning of the stream */
            1.0,
            GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH,
            GST_SEEK_TYPE_SET,
            0, /* playback position 0 */
            GST_SEEK_TYPE_NONE,
            GST_CLOCK_TIME_NONE))
      {
        //g_print("Seek failed!\n");
      }
      else
      {
        /*g_print("Seek succeed!\n");*/
      }
      break;
    case GST_MESSAGE_STREAM_STATUS: /* stream-status change */
    {
      GstStreamStatusType type;
      GstElement *owner;
      const GValue *val;
      gchar *path;
      GstTask *task = NULL;
      // g_message ("received STREAM_STATUS");
      gst_message_parse_stream_status (message, &type, &owner);
      //g_message ("type: %d", type);
      switch (type) {
        case GST_STREAM_STATUS_TYPE_ENTER: /* a streaming thread starts */
          //printf("fer%d\n",cpt);
          if(cpt==0)
          {
            //printf("fer%d\n",cpt);
            /* jump to the requested position "search*1e9"; the 1e9 factor
             * converts seconds to nanoseconds (GST_FORMAT_TIME) */
            if (!gst_element_seek_simple(play, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,search*1e9))
            {
              //g_print("Seek failed!\n");
            }
            else
            {
              /* seek succeeded: bump the counter so we never seek again */
              cpt++;
            }
          }
          break;
        default:
          //g_print("Seek fad!\n");
          break;
      }
      break;
    }
    default:
      break;
  }
  return TRUE;
}
// Returns the element that owns the stream this status message refers to,
// wrapped in the library's ElementPtr smart pointer.
ElementPtr StreamStatusMessage::owner() const
{
    GstElement *ownerElement = NULL;
    gst_message_parse_stream_status(object<GstMessage>(), NULL, &ownerElement);
    return ElementPtr::wrap(ownerElement);
}
// Returns the stream-status type carried by this message, converted from
// the raw GStreamer enum to the wrapper's StreamStatusType.
StreamStatusType StreamStatusMessage::statusType() const
{
    GstStreamStatusType rawType;
    gst_message_parse_stream_status(object<GstMessage>(), &rawType, NULL);
    return static_cast<StreamStatusType>(rawType);
}
// Bus handler for the recording pipeline: signals record end on EOS,
// manages the streaming-source timeout across state changes, logs errors,
// reports missing plugins, follows "redirect" element messages, and, when a
// souphttpsrc-based streaming thread is created, arms the HTTP timeout.
void eServiceMP3Record::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	// Keep ourselves alive for the duration of the handler.
	ePtr<iRecordableService> ptr = this;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	sourceName = gst_object_get_name(source);
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			eDebug("[eMP3ServiceRecord] gstBusCall eos event");
			// Stream end -> stop recording
			m_event((iRecordableService*)this, evGstRecordEnded);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			// Only react to state changes of the pipeline itself.
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_recording_pipeline))
				break;
			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			if(old_state == new_state)
				break;
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
			eDebug("[eMP3ServiceRecord] gstBusCall state transition %s -> %s",
				gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
			switch(transition)
			{
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					// Data is flowing: the streaming-source watchdog is no longer needed.
					if (m_streamingsrc_timeout)
						m_streamingsrc_timeout->stop();
					break;
				}
				default:
					break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free(debug);
			if (err->code != GST_STREAM_ERROR_CODEC_NOT_FOUND)
				eWarning("[eServiceMP3Record] gstBusCall Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName);
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			const GstStructure *msgstruct = gst_message_get_structure(msg);
			if (msgstruct)
			{
				if (gst_is_missing_plugin_message(msg))
				{
					GstCaps *caps = NULL;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL);
					if (caps)
					{
						// FIX: gst_caps_to_string() returns a newly allocated
						// string (transfer full); it was previously leaked.
						gchar *capsString = gst_caps_to_string(caps);
						std::string codec = capsString ? capsString : "";
						g_free(capsString);
						eDebug("[eServiceMP3Record] gstBusCall cannot record because of incompatible codecs %s", codec.c_str());
						gst_caps_unref(caps);
					}
				}
				else
				{
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if (eventname)
					{
						if (!strcmp(eventname, "redirect"))
						{
							// Follow the redirect: restart the pipeline on the new URI.
							const char *uri = gst_structure_get_string(msgstruct, "new-location");
							eDebug("[eServiceMP3Record] gstBusCall redirect to %s", uri);
							gst_element_set_state (m_recording_pipeline, GST_STATE_NULL);
							g_object_set(G_OBJECT (m_source), "uri", uri, NULL);
							gst_element_set_state (m_recording_pipeline, GST_STATE_PLAYING);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			if (type == GST_STREAM_STATUS_TYPE_CREATE)
			{
				// Resolve the element the new streaming thread belongs to.
				// Only the pad branch returns a new reference.
				if (GST_IS_PAD(source))
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if (GST_IS_ELEMENT(source))
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if (owner)
				{
					GstState state;
					gst_element_get_state(m_recording_pipeline, &state, NULL, 0LL);
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					// FIX: guard against a NULL factory before dereferencing it.
					const gchar *name = factory ? gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)) : "";
					// Arm the HTTP watchdog while the soup source is still prerolling.
					if (!strcmp(name, "souphttpsrc") && (state == GST_STATE_READY) && !m_streamingsrc_timeout->isActive())
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("[eServiceMP3Record] gstBusCall setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}
				}
				// FIX: only unref the reference gst_pad_get_parent_element gave
				// us, and never call gst_object_unref(NULL).
				if (GST_IS_PAD(source) && owner)
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free(sourceName);
}
// Central bus handler for the playback pipeline (playbin): dispatches EOS,
// state changes (subtitle-sink setup, A/V delay, streaming timeout), errors
// and info, tag/cover-art extraction, async-done stream enumeration,
// element messages (missing plugins, video geometry events), buffering
// statistics, and souphttpsrc timeout setup on streaming-thread creation.
void eServiceMP3::gstBusCall(GstMessage *msg)
{
	if (!msg)
		return;
	gchar *sourceName;
	GstObject *source;
	source = GST_MESSAGE_SRC(msg);
	if (!GST_IS_OBJECT(source))
		return;
	// Newly allocated; freed at the bottom of this function.
	sourceName = gst_object_get_name(source);
#if 0
	gchar *string;
	if (gst_message_get_structure(msg))
		string = gst_structure_to_string(gst_message_get_structure(msg));
	else
		string = g_strdup(GST_MESSAGE_TYPE_NAME(msg));
	eDebug("eTsRemoteSource::gst_message from %s: %s", sourceName, string);
	g_free(string);
#endif
	switch (GST_MESSAGE_TYPE (msg))
	{
		case GST_MESSAGE_EOS:
			m_event((iPlayableService*)this, evEOF);
			break;
		case GST_MESSAGE_STATE_CHANGED:
		{
			// Only state changes of the playbin itself are interesting.
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;
			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			if(old_state == new_state)
				break;
			eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					// Configure the subtitle sink once it exists, then apply
					// the configured audio delays.
					GstElement *subsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
					if (subsink)
					{
#ifdef GSTREAMER_SUBTITLE_SYNC_MODE_BUG
						/*
						 * HACK: disable sync mode for now, gstreamer suffers from a bug causing sparse streams to loose sync, after pause/resume / skip
						 * see: https://bugzilla.gnome.org/show_bug.cgi?id=619434
						 * Sideeffect of using sync=false is that we receive subtitle buffers (far) ahead of their
						 * display time.
						 * Not too far ahead for subtitles contained in the media container.
						 * But for external srt files, we could receive all subtitles at once.
						 * And not just once, but after each pause/resume / skip.
						 * So as soon as gstreamer has been fixed to keep sync in sparse streams, sync needs to be re-enabled.
						 */
						g_object_set (G_OBJECT (subsink), "sync", FALSE, NULL);
#endif
#if 0
						/* we should not use ts-offset to sync with the decoder time, we have to do our own decoder timekeeping */
						g_object_set (G_OBJECT (subsink), "ts-offset", -2L * GST_SECOND, NULL);
						/* late buffers probably will not occur very often */
						g_object_set (G_OBJECT (subsink), "max-lateness", 0L, NULL);
						/* avoid prerolling (it might not be a good idea to preroll a sparse stream) */
						g_object_set (G_OBJECT (subsink), "async", TRUE, NULL);
#endif
						eDebug("eServiceMP3::subsink properties set!");
						gst_object_unref(subsink);
					}
					setAC3Delay(ac3_delay);
					setPCMDelay(pcm_delay);
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
					// Playing: the streaming-source watchdog is no longer needed.
					if ( m_sourceinfo.is_streaming && m_streamingsrc_timeout )
						m_streamingsrc_timeout->stop();
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
		case GST_MESSAGE_ERROR:
		{
			gchar *debug;
			GError *err;
			gst_message_parse_error (msg, &err, &debug);
			g_free (debug);
			eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
			// Map codec-not-found errors on the sinks to UI events.
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					if ( g_strrstr(sourceName, "videosink") )
						m_event((iPlayableService*)this, evUser+11);
					else if ( g_strrstr(sourceName, "audiosink") )
						m_event((iPlayableService*)this, evUser+10);
				}
			}
			g_error_free(err);
			break;
		}
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;
			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					m_event((iPlayableService*)this, evUser+14);
			}
			g_error_free(inf);
			break;
		}
		case GST_MESSAGE_TAG:
		{
			// Merge new tags into the accumulated stream tags, dump embedded
			// cover art to /tmp/.id3coverart, and notify listeners.
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);
			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}
			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
				close(fd);
				eDebug("eServiceMP3::/tmp/.id3coverart %d bytes written ", ret);
				m_event((iPlayableService*)this, evUser+13);
			}
			gst_tag_list_free(tags);
			m_event((iPlayableService*)this, evUpdatedInfo);
			break;
		}
		case GST_MESSAGE_ASYNC_DONE:
		{
			// Preroll complete: enumerate the playbin's audio/subtitle
			// streams and rebuild the local stream lists.
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;
			GstTagList *tags;
			gint i, active_idx, n_video = 0, n_audio = 0, n_text = 0;
			g_object_get (m_gst_playbin, "n-video", &n_video, NULL);
			g_object_get (m_gst_playbin, "n-audio", &n_audio, NULL);
			g_object_get (m_gst_playbin, "n-text", &n_text, NULL);
			eDebug("eServiceMP3::async-done - %d video, %d audio, %d subtitle", n_video, n_audio, n_text);
			// Nothing playable at all -> give up.
			if ( n_video + n_audio <= 0 )
				stop();
			active_idx = 0;
			m_audioStreams.clear();
			m_subtitleStreams.clear();
			for (i = 0; i < n_audio; i++)
			{
				audioStream audio;
				gchar *g_codec, *g_lang;
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
				GstCaps* caps = gst_pad_get_negotiated_caps(pad);
				if (!caps)
					continue;
				GstStructure* str = gst_caps_get_structure(caps, 0);
				const gchar *g_type = gst_structure_get_name(str);
				eDebug("AUDIO STRUCT=%s", g_type);
				audio.type = gstCheckAudioPad(str);
				// Defaults in case the tag list carries no codec/language.
				g_codec = g_strdup(g_type);
				g_lang = g_strdup_printf ("und");
				g_signal_emit_by_name (m_gst_playbin, "get-audio-tags", i, &tags);
				if ( tags && gst_is_tag_list(tags) )
				{
					gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &g_codec);
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_free(tags);
				}
				audio.language_code = std::string(g_lang);
				audio.codec = std::string(g_codec);
				eDebug("eServiceMP3::audio stream=%i codec=%s language=%s", i, g_codec, g_lang);
				m_audioStreams.push_back(audio);
				g_free (g_lang);
				g_free (g_codec);
				gst_caps_unref(caps);
			}
			for (i = 0; i < n_text; i++)
			{
				gchar *g_codec = NULL, *g_lang = NULL;
				g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
				subtitleStream subs;
				g_lang = g_strdup_printf ("und");
				if ( tags && gst_is_tag_list(tags) )
				{
					gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
					gst_tag_list_get_string(tags, GST_TAG_SUBTITLE_CODEC, &g_codec);
					gst_tag_list_free(tags);
				}
				subs.language_code = std::string(g_lang);
				eDebug("eServiceMP3::subtitle stream=%i language=%s codec=%s", i, g_lang, g_codec);
				GstPad* pad = 0;
				g_signal_emit_by_name (m_gst_playbin, "get-text-pad", i, &pad);
				// Watch the pad's caps so late-arriving subtitle formats are detected.
				if ( pad )
					g_signal_connect (G_OBJECT (pad), "notify::caps", G_CALLBACK (gstTextpadHasCAPS), this);
				subs.type = getSubtitleType(pad, g_codec);
				m_subtitleStreams.push_back(subs);
				g_free (g_lang);
			}
			m_event((iPlayableService*)this, evUpdatedInfo);
			// Report a fatal missing codec (video, or audio with no usable tracks).
			if ( m_errorInfo.missing_codec != "" )
			{
				if ( m_errorInfo.missing_codec.find("video/") == 0 || ( m_errorInfo.missing_codec.find("audio/") == 0 && getNumberOfTracks() == 0 ) )
					m_event((iPlayableService*)this, evUser+12);
			}
			break;
		}
		case GST_MESSAGE_ELEMENT:
		{
			if (const GstStructure *msgstruct = gst_message_get_structure(msg))
			{
				if ( gst_is_missing_plugin_message(msg) )
				{
					GstCaps *caps;
					gst_structure_get (msgstruct, "detail", GST_TYPE_CAPS, &caps, NULL);
					// NOTE(review): gst_caps_to_string() returns a newly
					// allocated string (transfer full); it is leaked here.
					std::string codec = (const char*) gst_caps_to_string(caps);
					gchar *description = gst_missing_plugin_message_get_description(msg);
					if ( description )
					{
						eDebug("eServiceMP3::m_errorInfo.missing_codec = %s", codec.c_str());
						m_errorInfo.error_message = "GStreamer plugin " + (std::string)description + " not available!\n";
						m_errorInfo.missing_codec = codec.substr(0,(codec.find_first_of(',')));
						g_free(description);
					}
					gst_caps_unref(caps);
				}
				else
				{
					// Video geometry/framerate/progressive events posted by the sink.
					const gchar *eventname = gst_structure_get_name(msgstruct);
					if ( eventname )
					{
						if (!strcmp(eventname, "eventSizeChanged") || !strcmp(eventname, "eventSizeAvail"))
						{
							gst_structure_get_int (msgstruct, "aspect_ratio", &m_aspect);
							gst_structure_get_int (msgstruct, "width", &m_width);
							gst_structure_get_int (msgstruct, "height", &m_height);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoSizeChanged);
						}
						else if (!strcmp(eventname, "eventFrameRateChanged") || !strcmp(eventname, "eventFrameRateAvail"))
						{
							gst_structure_get_int (msgstruct, "frame_rate", &m_framerate);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoFramerateChanged);
						}
						else if (!strcmp(eventname, "eventProgressiveChanged") || !strcmp(eventname, "eventProgressiveAvail"))
						{
							gst_structure_get_int (msgstruct, "progressive", &m_progressive);
							if (strstr(eventname, "Changed"))
								m_event((iPlayableService*)this, evVideoProgressiveChanged);
						}
					}
				}
			}
			break;
		}
		case GST_MESSAGE_BUFFERING:
		{
			// Cache buffering percentage and throughput stats, then notify.
			GstBufferingMode mode;
			gst_message_parse_buffering(msg, &(m_bufferInfo.bufferPercent));
			gst_message_parse_buffering_stats(msg, &mode, &(m_bufferInfo.avgInRate), &(m_bufferInfo.avgOutRate), &(m_bufferInfo.bufferingLeft));
			m_event((iPlayableService*)this, evBuffering);
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			GstStreamStatusType type;
			GstElement *owner;
			gst_message_parse_stream_status (msg, &type, &owner);
			// When a streaming thread is created for a network source, arm
			// the HTTP timeout on souphttpsrc.
			if ( type == GST_STREAM_STATUS_TYPE_CREATE && m_sourceinfo.is_streaming )
			{
				// Only the pad branch returns a new reference (unreffed below).
				if ( GST_IS_PAD(source) )
					owner = gst_pad_get_parent_element(GST_PAD(source));
				else if ( GST_IS_ELEMENT(source) )
					owner = GST_ELEMENT(source);
				else
					owner = 0;
				if ( owner )
				{
					GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
					const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
					if (!strcmp(name, "souphttpsrc"))
					{
						m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
						g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
						eDebug("eServiceMP3::GST_STREAM_STATUS_TYPE_CREATE -> setting timeout on %s to %is", name, HTTP_TIMEOUT);
					}
				}
				// NOTE(review): if the pad has no parent element, `owner` is
				// NULL here and gst_object_unref(NULL) warns — confirm.
				if ( GST_IS_PAD(source) )
					gst_object_unref(owner);
			}
			break;
		}
		default:
			break;
	}
	g_free (sourceName);
}