/* Pad-added callback: links a decoder's newly created source pad to
 * sinkElement's static "sink" pad.  On GST_PAD_LINK_NOFORMAT the caps of
 * both pads are logged as a warning; any other link failure aborts the
 * process. */
static void
on_new_pad (GstElement * dec, GstPad * pad, GstElement * sinkElement)
{
  GstPad *sinkpad;

  if (pad == NULL || sinkElement == NULL)
    return;

  sinkpad = gst_element_get_static_pad (sinkElement, "sink");
  if (sinkpad == NULL)
    return;

  if (!gst_pad_is_linked (sinkpad)) {
    GstPadLinkReturn ret = gst_pad_link (pad, sinkpad);

    if (ret == GST_PAD_LINK_NOFORMAT) {
      GstCaps *a = gst_pad_get_current_caps (pad);
      GstCaps *b = gst_pad_get_current_caps (sinkpad);
      gchar *a_str = a ? gst_caps_to_string (a) : NULL;
      gchar *b_str = b ? gst_caps_to_string (b) : NULL;

      g_warning ("Formats of A: %s\nFormats of B:%s\n",
          a_str ? a_str : "<NULL>", b_str ? b_str : "<NULL>");

      /* Fix: the original leaked both caps and the strings built for the
       * warning message. */
      g_free (a_str);
      g_free (b_str);
      if (a)
        gst_caps_unref (a);
      if (b)
        gst_caps_unref (b);

      gst_pad_unlink (pad, sinkpad);
    } else if (ret != GST_PAD_LINK_OK) {
      GstElement *parentA = gst_pad_get_parent_element (pad);
      GstElement *parentB = gst_pad_get_parent_element (sinkpad);
      gchar *nameA = parentA ? gst_element_get_name (parentA) : NULL;
      gchar *nameB = parentB ? gst_element_get_name (parentB) : NULL;

      /* NOTE: g_error() is fatal (aborts); the cleanup and exit(3) below
       * only run if logging is configured not to abort. */
      g_error ("Failed to link pads! %s - %s : %d",
          nameA ? nameA : "<NULL>", nameB ? nameB : "<NULL>", ret);

      /* Fix: the original leaked the name strings returned by
       * gst_element_get_name(). */
      g_free (nameA);
      g_free (nameB);
      g_clear_object (&parentA);
      g_clear_object (&parentB);
      exit (3);
    }
  }

  gst_object_unref (sinkpad);
}
/* XXX: this is a workaround to the absence of any proper way to specify
 * DMABUF memory capsfeatures or bufferpool option to downstream. */
/* Walks the chain of upstream peers of @pad, passing transparently through
 * auto-named capsfilter elements, until a GstPushSrc is found.  Reports
 * whether that source is configured for dmabuf export: "io-mode" == 3 for
 * camerasrc, == 5 ("dmabuf-import") for v4l2src.  Returns FALSE when no
 * such peer exists or the source is named differently. */
static gboolean
has_dmabuf_capable_peer (GstVaapiPluginBase * plugin, GstPad * pad)
{
  GstPad *other_pad = NULL;
  GstElement *element = NULL;
  gchar *element_name = NULL;
  gboolean is_dmabuf_capable = FALSE;
  gint v;

  /* Take our own ref on @pad: each loop iteration drops exactly one pad
   * ref (the caller's pad here, or a capsfilter sink pad obtained below). */
  gst_object_ref (pad);
  for (;;) {
    other_pad = gst_pad_get_peer (pad);
    gst_object_unref (pad);
    if (!other_pad)
      break;

    element = gst_pad_get_parent_element (other_pad);
    gst_object_unref (other_pad);
    if (!element)
      break;

    if (GST_IS_PUSH_SRC (element)) {
      element_name = gst_element_get_name (element);
      if (!element_name)
        break;
      /* Only factory-auto-named v4l2src%d / camerasrc%d instances are
       * recognized. */
      if ((sscanf (element_name, "v4l2src%d", &v) != 1)
          && (sscanf (element_name, "camerasrc%d", &v) != 1))
        break;

      v = 0;
      g_object_get (element, "io-mode", &v, NULL);
      if (strncmp (element_name, "camerasrc", 9) == 0)
        is_dmabuf_capable = v == 3;
      else
        is_dmabuf_capable = v == 5;     /* "dmabuf-import" enum value */
      break;
    } else if (GST_IS_BASE_TRANSFORM (element)) {
      /* Only continue the walk through capsfilters; any other transform
       * stops the search. */
      element_name = gst_element_get_name (element);
      if (!element_name || sscanf (element_name, "capsfilter%d", &v) != 1)
        break;

      /* Continue from the capsfilter's sink pad; the ref returned here is
       * released at the top of the next iteration. */
      pad = gst_element_get_static_pad (element, "sink");
      if (!pad)
        break;
    } else
      break;

    g_free (element_name);
    element_name = NULL;
    g_clear_object (&element);
  }

  /* Release whatever the final (breaking) iteration left behind. */
  g_free (element_name);
  g_clear_object (&element);

  return is_dmabuf_capable;
}
/* Component instance initialization and parameter handling. */
/* NOTE(review): MPF framework boilerplate — the implicit int return type
 * and the `component` symbol are presumably supplied by the surrounding
 * MPF component macros; confirm against the framework headers. */
component_setup()
{
	// Initialize logging from the "loglevel" parameter; the logger is
	// tagged with this element's name.
	mpf_private.loglevel = mpf_param_get_int("loglevel");
	mpf_logger_init(&mpf_private.logger, mpf_private.loglevel,
	    gst_element_get_name(GST_ELEMENT(component)));

	// Fall back to the element name when no "key" parameter was given.
	// gst_element_get_name() returns an allocated string, so ownership of
	// the fallback value rests with mpf_private.key.
	mpf_private.key = mpf_param_get_string("key");
	if (!mpf_private.key) {
		mpf_private.key = gst_element_get_name(GST_ELEMENT(component));
	}

	// Force init of GRDF to avoid multi-threading problems.
	grdf_init();
}
int GStreamer_setMedia(const char *uri) { GstElement *src, *sink; int is_video, is_http = 1; int ret = 0; if (!g_initialized) { g_error("GStreamer: library not initialized!\n"); return -1; } cleanup_pipeline(); pthread_mutex_lock(&g_mutex); g_position = 0; g_duration = 0; g_print("GStreamer: playing : %s\n", uri); is_video = strstr(uri, "264") != NULL; is_http = !strncmp(uri, "http://", strlen("http://")); g_printf("GStreamer: playing %s via %s\n", is_video ? "video" : "audio", is_http ? "http" : "filesrc"); if (is_http) g_object_set(G_OBJECT(g_httpsrc), "location", uri, NULL); else g_object_set(G_OBJECT(g_filesrc), "location", uri, NULL); //sink = (is_video) ? g_videosink : g_audiosink; sink = (is_video) ? create_video_sink() : create_audio_sink(); src = (is_http) ? g_httpsrc : g_filesrc; gst_bin_add_many(GST_BIN(g_pipeline), src, sink, NULL); if (!gst_element_link(src, sink)) { g_error("GStreamer: failed to link %s with %s\n", gst_element_get_name(src), gst_element_get_name(sink)); return -1; } gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_PLAYING); //gst_element_get_state(GST_ELEMENT(g_pipeline), ...); /* TODO what is signalled? */ pthread_cond_signal(&g_main_cond); pthread_mutex_unlock(&g_mutex); return ret; }
/* Links @from to @to; aborts with a diagnostic if linking fails.  On the
 * failure path g_error() never returns, so the frees only run on success. */
void link_or_die(GstElement *from, GstElement *to)
{
	gchar *src_name = gst_element_get_name(from);
	gchar *dst_name = gst_element_get_name(to);

	if (!gst_element_link(from, to))
		g_error("Could not link %s to %s.\n", src_name, dst_name);

	g_free(src_name);
	g_free(dst_name);
}
/* Links @from to @to; prints a diagnostic and terminates on failure. */
void Pipeline::link_or_die(GstElement *from, GstElement *to)
{
    gchar *src_name = gst_element_get_name(from);
    gchar *dst_name = gst_element_get_name(to);
    const bool linked = gst_element_link(from, to);

    if (!linked) {
        g_print("Could not link %s to %s.\n", src_name, dst_name);
        exit(1);
    }

    g_free(src_name);
    g_free(dst_name);
}
/* Records @element in the test's "elements used" bookkeeping exactly once:
 * de-duplicates via a hash table keyed by element name, then publishes the
 * element's name, factory long-name and (when present) parent name as
 * extra-info entries labelled "elements-used.<count>.<field>". */
static void
add_element_used (InsanityGstPipelineTest * ptest, GstElement * element)
{
  GstElementFactory *factory;
  const char *factory_name;
  char label[32], *element_name;
  GValue string_value = { 0 };
  GstElement *parent;

  /* Only add once */
  element_name = gst_element_get_name (element);
  if (g_hash_table_lookup_extended (ptest->priv->elements_used, element_name,
          NULL, NULL)) {
    g_free (element_name);
    return;
  }
  /* The table owns its own copy of the name (g_strdup). */
  g_hash_table_insert (ptest->priv->elements_used, g_strdup (element_name),
      NULL);
  ptest->priv->element_count++;

  g_value_init (&string_value, G_TYPE_STRING);

  factory = gst_element_get_factory (element);
  factory_name =
      factory ? gst_element_factory_get_metadata (factory,
      GST_ELEMENT_METADATA_LONGNAME) : "(no factory)";

  /* g_value_take_string() assumes ownership of element_name; it must not
   * be freed after this point. */
  g_value_take_string (&string_value, element_name);
  snprintf (label, sizeof (label), "elements-used.%u.name",
      ptest->priv->element_count);
  insanity_test_set_extra_info (INSANITY_TEST (ptest), label, &string_value);
  g_value_reset (&string_value);

  g_value_set_string (&string_value, factory_name);
  snprintf (label, sizeof (label), "elements-used.%u.factory",
      ptest->priv->element_count);
  insanity_test_set_extra_info (INSANITY_TEST (ptest), label, &string_value);
  g_value_reset (&string_value);

  parent = GST_ELEMENT (gst_element_get_parent (element));
  if (parent) {
    /* Ownership of the parent's name string also transfers to the GValue. */
    g_value_take_string (&string_value, gst_element_get_name (parent));
    snprintf (label, sizeof (label), "elements-used.%u.parent",
        ptest->priv->element_count);
    insanity_test_set_extra_info (INSANITY_TEST (ptest), label, &string_value);
    g_value_reset (&string_value);
    gst_object_unref (parent);
  }
}
/**
 * \brief Add the UDP element (udpsink) to the Service Provider's pipeline
 * \param pipeline the pipeline associated to this SP
 * \param bus the bus of the channel
 * \param bus_watch_id an id watch on the bus
 * \param input last element added in pipeline to which we should link elements added
 * \param channel_entry_index the channel's index of this SP: used to build the multicast address
 * \return last element added in pipeline (udpsink), or NULL on error
 */
static GstElement*
addUDP(GstElement *pipeline, GstBus *bus, guint bus_watch_id,
       GstElement *input, long channel_entry_index)
{
	GstElement *udpsink;

	g_debug("add %s in Service Provider's pipeline", UDPSINK_NAME);

	/* Create the UDP sink */
	udpsink = gst_element_factory_make_log("udpsink", UDPSINK_NAME);
	if (!udpsink)
		return NULL;

	set_udpsink_param(udpsink, channel_entry_index);

	/* add udpsink to pipeline */
	if (!gst_bin_add(GST_BIN(pipeline), udpsink)) {
		/* Fix: gst_element_get_name() returns an allocated string the
		 * original leaked; also drop our ref on the un-added element. */
		gchar *name = gst_element_get_name(udpsink);
		g_critical("Unable to add %s to pipeline", name);
		g_free(name);
		gst_object_unref(udpsink);
		return NULL;
	}

	return udpsink;
}
/* Bus watch: on ERROR prints the reporting element, message and debug info;
 * on ERROR or EOS flags exit_read and quits the main loop.  Always keeps
 * the watch installed (returns TRUE). */
static gboolean
my_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err;
      gchar *debug;
      gchar *str;

      gst_message_parse_error (message, &err, &debug);
      str = gst_element_get_name (message->src);
      g_print ("%s error: %s\n", str, err->message);
      g_free (str);
      g_print ("Debug: %s\n", debug);
      g_error_free (err);
      g_free (debug);

      /* Fix: "presse" typo in the user-facing prompt. */
      printf ("press <ENTER> key to exit\n");
      exit_read = 1;
      g_main_loop_quit (loop);
      break;
    }
      /* Fix: in the original, the EOS and default cases were nested inside
       * the ERROR case's compound statement; the switch is now flat. */
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      printf ("press any key to exit\n");
      exit_read = 1;
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}
/* Computes the start timestamp for the next DTMF event: "now" per the
 * element clock plus the minimum inter-digit interval, expressed as running
 * time, then converts it to an RTP timestamp at the configured clock-rate. */
static void
gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
{
  GstClock *clock;
  GstClockTime base_time;

#ifdef MAEMO_BROKEN
  /* Maemo workaround: treat the element base time as zero. */
  base_time = 0;
#else
  base_time = gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
#endif

  clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
  if (clock != NULL) {
    /* Running time = clock time - base time, pushed
     * MIN_INTER_DIGIT_INTERVAL ms into the future. */
    dtmfsrc->timestamp = gst_clock_get_time (clock)
        + (MIN_INTER_DIGIT_INTERVAL * GST_MSECOND) - base_time;
    dtmfsrc->start_timestamp = dtmfsrc->timestamp;
    gst_object_unref (clock);
  } else {
    gchar *dtmf_name = gst_element_get_name (dtmfsrc);

    GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
    dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
    g_free (dtmf_name);
  }

  /* Scale the segment running time at `timestamp` to RTP units, on top of
   * the ts_base offset.  NOTE(review): if no clock was set, timestamp is
   * GST_CLOCK_TIME_NONE here and still fed to the conversion — confirm
   * downstream handles that. */
  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
      dtmfsrc->clock_rate, GST_SECOND);
}
void GStreamerWrapper::handleGStMessage() { if ( m_GstBus != NULL ) { while ( gst_bus_have_pending( m_GstBus ) ) { m_GstMessage = gst_bus_pop( m_GstBus ); if ( m_GstMessage != NULL ) { // std::cout << "Message Type: " << GST_MESSAGE_TYPE_NAME( m_GstMessage ) << std::endl; switch ( GST_MESSAGE_TYPE( m_GstMessage ) ) { case GST_MESSAGE_ERROR: GError* err; gchar* debug; gst_message_parse_error( m_GstMessage, &err, &debug ); std::cout << "Embedded video playback halted: module " << gst_element_get_name( GST_MESSAGE_SRC( m_GstMessage ) ) << " reported " << err->message << std::endl; close(); g_error_free(err); g_free(debug); break; case GST_MESSAGE_EOS: switch ( m_LoopMode ) { case NO_LOOP: stop(); break; case LOOP: stop(); play(); break; case BIDIRECTIONAL_LOOP: m_PlayDirection = (PlayDirection)-m_PlayDirection; stop(); play(); break; default: break; } break; default: break; } } gst_object_unref( m_GstMessage ); } } }
/**
 * helper for gst_goo_util_find_goo_component() to iterate and search
 * the members of a bin
 */
static GooComponent *
find_goo_component_in_bin (GstBin *bin, SearchContext *ctx)
{
	GstIterator *itr;
	gpointer item;
	GooComponent *component = NULL;
	gchar *bin_name = gst_element_get_name (bin);

	GST_INFO ("bin=%s (%s)", bin_name, G_OBJECT_TYPE_NAME (bin));
	/* Fix: the name string returned by gst_element_get_name() was leaked. */
	g_free (bin_name);

	/* note: we don't handle the case of the underlying data structure
	 * changing while iterating.. we just bail out and the user needs to
	 * restart. */
	itr = gst_bin_iterate_elements (bin);
	while (gst_iterator_next (itr, &item) == GST_ITERATOR_OK)
	{
		GstElement *elem = GST_ELEMENT (item);

		component = check_for_goo_component (elem, ctx);
		if (component == NULL)
		{
			component = find_goo_component (elem, ctx);
		}

		/* Fix: always drop the item ref before deciding whether to stop;
		 * the original's for-loop condition fetched one more item after a
		 * component was found and leaked its ref. */
		gst_object_unref (item);

		if (component != NULL)
			break;
	}
	gst_iterator_free (itr);

	return component;
}
/**
 * \brief Add the UDP element (udpsrc) to the pipeline for a Service User channel
 * \param pipeline the associated pipeline of the channel
 * \param bus the bus of the channel
 * \param bus_watch_id an id watch on the bus
 * \param caps the input video caps built from the SDP file, given to udpsrc
 * \param channel_entry the channel of the SU in the device's channel table
 * \return last element added in pipeline (udpsrc), or NULL on error
 */
static GstElement*
addUDP_SU(GstElement *pipeline, GstBus *bus, guint bus_watch_id,
          GstCaps* caps, struct channelTable_entry * channel_entry)
{
	GstElement *udpsrc;

	g_debug("add %s to Service Users's pipeline", UDPSRC_NAME);

	/* Create the UDP source */
	udpsrc = gst_element_factory_make_log("udpsrc", UDPSRC_NAME);
	if (!udpsrc)
		return NULL;

	/* set the parameters of the udpsrc element */
	set_udpsrc_param(udpsrc, channel_entry, caps);

	/* add udpsrc to pipeline */
	if (!gst_bin_add(GST_BIN(pipeline), udpsrc)) {
		/* Fix: gst_element_get_name() returns an allocated string the
		 * original leaked; also drop our ref on the un-added element. */
		gchar *name = gst_element_get_name(udpsrc);
		g_critical("Unable to add %s to pipeline", name);
		g_free(name);
		gst_object_unref(udpsrc);
		return NULL;
	}

	return udpsrc;
}
/* Pad event handler: logs every event passing through @pad and, on EOS,
 * emits a summary line on the component's "events" pad before forwarding
 * the event with the default handler. */
static gboolean
pad_event_handler(GstPad *pad, GstEvent *event)
{
	// Establish thread-local component context.
	MpfComponent *component = MPF_COMPONENT(GST_OBJECT_PARENT(pad));
	mpf_component_set_curcomponent(component);

	GstElement *element = gst_pad_get_parent_element(pad);
	gchar *elementname = gst_element_get_name(element);
	gchar *padname = gst_pad_get_name(pad);
	const gchar *eventname = gst_event_type_get_name(event->type);

	MPF_PRIVATE_ALWAYS("element=%s pad=%s event=%s\n", elementname, padname,
	    eventname);

	// If EOS, poke a message out of the events pad.
	if (event->type == GST_EVENT_EOS) {
		GstPad *events = gst_element_get_pad(element, "events");
		printf("GstPad *events=%p\n", events);

		GString *string = g_string_new("");
		g_string_printf(string, "%s: EOS buffer_count=%d\n", elementname,
		    mpf_private.buffer_count);
		mpf_voidstar_push("events", mpf_voidstar_stralloc(string->str));
		mpf_voidstar_send_outbuffers();
		gst_pad_push_event(events, gst_event_new_eos());

		/* Fix: the GString and the pad ref were leaked on every EOS.
		 * NOTE(review): assumes mpf_voidstar_stralloc() copies its input
		 * — confirm against the MPF headers before relying on this. */
		g_string_free(string, TRUE);
		if (events)
			gst_object_unref(events);
	}

	g_free(elementname);
	g_free(padname);
	/* Fix: gst_pad_get_parent_element() returns a ref the original never
	 * released. */
	gst_object_unref(element);

	return gst_pad_event_default(pad, event);
}
/* Per-pad end-to-end latency probe: folds the measured latency @t into the
 * running average of every metadata entry whose id was added by this
 * session's endpoint (id prefixed with the endpoint's element name). */
static void
kms_base_rtp_session_e2e_latency_cb (GstPad * pad, KmsMediaType type,
    GstClockTimeDiff t, KmsList * mdata, gpointer user_data)
{
  KmsBaseRtpSession *self = KMS_BASE_RTP_SESSION (user_data);
  KmsListIter iter;
  gpointer key, value;
  gchar *name;

  name = gst_element_get_name (KMS_SDP_SESSION (self)->ep);

  kms_list_iter_init (&iter, mdata);
  while (kms_list_iter_next (&iter, &key, &value)) {
    gchar *id = (gchar *) key;
    StreamE2EAvgStat *stat;

    if (!g_str_has_prefix (id, name)) {
      /* This element did not add this mark to the metadata */
      continue;
    }

    stat = (StreamE2EAvgStat *) value;
    stat->avg = KMS_STATS_CALCULATE_LATENCY_AVG (t, stat->avg);
  }

  /* Fix: gst_element_get_name() returns an allocated string which the
   * original leaked on every latency callback. */
  g_free (name);
}
/* Per-pad latency probe for the recorder endpoint: folds the measured
 * latency @t into the running average of every metadata entry whose id was
 * added by this element (id prefixed with the element's name). */
static void
kms_recorder_endpoint_latency_cb (GstPad * pad, KmsMediaType type,
    GstClockTimeDiff t, KmsList * mdata, gpointer user_data)
{
  KmsRecorderEndpoint *self = KMS_RECORDER_ENDPOINT (user_data);
  KmsListIter iter;
  gpointer key, value;
  gchar *name;

  name = gst_element_get_name (self);

  kms_list_iter_init (&iter, mdata);
  while (kms_list_iter_next (&iter, &key, &value)) {
    gchar *id = (gchar *) key;
    StreamE2EAvgStat *stat;

    if (!g_str_has_prefix (id, name)) {
      /* This element did not add this mark to the metadata */
      continue;
    }

    stat = (StreamE2EAvgStat *) value;
    stat->avg = KMS_STATS_CALCULATE_LATENCY_AVG (t, stat->avg);
  }

  /* Fix: gst_element_get_name() returns an allocated string which the
   * original leaked on every latency callback. */
  g_free (name);
}
/* Bus callback for "level" element messages: updates the level meter,
 * compares the decay level against the UI threshold, and toggles the
 * valve's "drop" property (noise-gate behaviour). */
static gboolean
gst_bus_cb(GstBus *bus, GstMessage *msg, BusCbCtx *ctx)
{
	if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ELEMENT &&
	    gst_structure_has_name(msg->structure, "level")) {

		GstElement *src = GST_ELEMENT(GST_MESSAGE_SRC(msg));
		gchar *name = gst_element_get_name(src);

		if (!strcmp(name, "level")) {
			gdouble percent;
			gdouble threshold;
			GstElement *valve;

			percent = gst_msg_db_to_percent(msg, "rms");
			gtk_progress_bar_set_fraction(ctx->level, percent * 5);

			percent = gst_msg_db_to_percent(msg, "decay");
			threshold = gtk_range_get_value(ctx->threshold) / 100.0;

			valve = gst_bin_get_by_name(GST_BIN(GST_ELEMENT_PARENT(src)),
			    "valve");
			if (valve != NULL) {
				g_object_set(valve, "drop", (percent < threshold), NULL);
				/* Fix: gst_bin_get_by_name() returns a ref the
				 * original leaked on every level message. */
				gst_object_unref(valve);
			}

			g_object_set(ctx->level, "text",
			    (percent < threshold) ? _("DROP") : " ", NULL);
		}
		g_free(name);
	}

	return TRUE;
}
/* Chooses the start timestamp for the next DTMF tone: continues from
 * last_stop when it is valid, otherwise falls back to the current clock
 * running time.  The element timestamp only ever moves forward. */
static void
gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
{
  GstClockTime last_stop;
  GstClockTime timestamp;

  /* last_stop is shared state; read it under the object lock. */
  GST_OBJECT_LOCK (dtmfsrc);
  last_stop = dtmfsrc->last_stop;
  GST_OBJECT_UNLOCK (dtmfsrc);

  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
    timestamp = last_stop;
  } else {
    GstClock *clock;

    /* If there is no valid start time, lets use now as the start time */
    clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
    if (clock != NULL) {
      /* Running time = clock time - base time. */
      timestamp = gst_clock_get_time (clock)
          - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
      gst_object_unref (clock);
    } else {
      gchar *dtmf_name = gst_element_get_name (dtmfsrc);

      /* No clock: invalidate the timestamp and bail out. */
      GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
      dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
      g_free (dtmf_name);
      return;
    }
  }

  /* Make sure the timestamp always goes forward */
  if (timestamp > dtmfsrc->timestamp)
    dtmfsrc->timestamp = timestamp;
}
/* Bus watch: logs every message with its sender's name, quits the main
 * loop on ERROR or EOS, and keeps the watch installed (returns TRUE). */
static gboolean
my_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
{
  GstElement *sender = (GstElement *) GST_MESSAGE_SRC (message);
  gchar *name = gst_element_get_name (sender);
  GMainLoop *loop = (GMainLoop *) data;

  g_print ("Got %s message from %s\n", GST_MESSAGE_TYPE_NAME (message), name);
  /* Fix: gst_element_get_name() returns an allocated copy; the original
   * declared it const and leaked it on every bus message. */
  g_free (name);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err;
      gchar *debug;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s (%s)\n", err->message, debug);
      g_error_free (err);
      g_free (debug);

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      g_main_loop_quit (loop);
      break;
    default:
      /* unhandled message */
      break;
  }

  return TRUE;
}
/* GstElement change_state vfunc for NleObject: commits pending positioning
 * state when going NULL->READY outside of a composition, runs prepare on
 * READY->PAUSED, chains up to the parent class, and cleans up on
 * PAUSED->READY. */
static GstStateChangeReturn
nle_object_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
    {
      GstObject *parent = gst_object_get_parent (GST_OBJECT (element));

      /* Going to READY and if we are not in a composition, we need to make
       * sure that the object positioning state is properly commited */
      if (parent) {
        gchar *name = gst_element_get_name (GST_ELEMENT (parent));

        /* A parent named "current-bin" means we live inside a composition's
         * internal bin; anything else means we were added elsewhere and
         * must commit ourselves. */
        if (g_strcmp0 (name, "current-bin")
            && !NLE_OBJECT_IS_COMPOSITION (NLE_OBJECT (element))) {
          GST_INFO ("Adding nleobject to something that is not a composition,"
              " commiting ourself");
          nle_object_commit (NLE_OBJECT (element), FALSE);
        }

        g_free (name);
        gst_object_unref (parent);
      }
    }
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Prepare failure vetoes the transition before chaining up. */
      if (nle_object_prepare (NLE_OBJECT (element)) == GST_STATE_CHANGE_FAILURE) {
        ret = GST_STATE_CHANGE_FAILURE;
        goto beach;
      }
      break;
    default:
      break;
  }

  GST_DEBUG_OBJECT (element, "Calling parent change_state");
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  GST_DEBUG_OBJECT (element, "Return from parent change_state was %d", ret);

  if (ret == GST_STATE_CHANGE_FAILURE)
    goto beach;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* cleanup nleobject */
      if (nle_object_cleanup (NLE_OBJECT (element)) == GST_STATE_CHANGE_FAILURE)
        ret = GST_STATE_CHANGE_FAILURE;
      break;
    default:
      break;
  }

beach:
  return ret;
}
/* Probes a capture device: builds a "<src> ! fakesink" pipeline for the
 * device, brings it to PLAYING (waiting up to 10 s), then reads the source
 * pad's caps to enumerate the device's supported video formats. */
static void get_device_data (ofGstDevice &webcam_device, int desired_framerate)
{
    string pipeline_desc = webcam_device.gstreamer_src + " name=source device=" +
            webcam_device.video_device + " ! fakesink";

    GError * err = NULL;
    GstElement * pipeline = gst_parse_launch (pipeline_desc.c_str(), &err);
    if ((pipeline == NULL) || (err != NULL)){
        if (err){
            ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data: %s", err->message);
            g_error_free (err);
        }else{
            ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data, cannot get pipeline");
        }
        if(pipeline)
            gst_object_unref (pipeline);
        return;
    }

    // TODO: try to lower seconds,
    // Start the pipeline and wait for max. 10 seconds for it to start up
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    GstStateChangeReturn ret = gst_element_get_state (pipeline, NULL, NULL, 10 * GST_SECOND);

    // Check if any error messages were posted on the bus
    GstBus * bus = gst_element_get_bus (pipeline);
    GstMessage * msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
    gst_object_unref (bus);

    if ((msg == NULL) && (ret == GST_STATE_CHANGE_SUCCESS)){

        gst_element_set_state (pipeline, GST_STATE_PAUSED);

        GstElement *src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
        char       *name;
        g_object_get (G_OBJECT (src), "device-name", &name, (void*)NULL);

        ofLog(OF_LOG_VERBOSE, "Device: %s (%s)\n", name==NULL?"":name, webcam_device.video_device.c_str());
        // NOTE(review): `name` (g_object_get copy) and the `src` ref from
        // gst_bin_get_by_name() are never released here — leak; confirm
        // and free with g_free()/gst_object_unref() in a follow-up.
        GstPad     *pad  = gst_element_get_pad (src, "src");
        GstCaps    *caps = gst_pad_get_caps (pad);
        gst_object_unref (pad);

        get_supported_video_formats (webcam_device, *caps, desired_framerate);

        gst_caps_unref (caps);
    }else if(msg){
        gchar *debug;
        gst_message_parse_error(msg, &err, &debug);

        // NOTE(review): the string from gst_element_get_name() and `msg`
        // itself are leaked on this path; `debug` is freed but msg needs
        // gst_message_unref() — confirm in a follow-up.
        ofLog(OF_LOG_ERROR, "ofGstUtils: error getting device data; module %s reported: %s",
              gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);

        g_error_free(err);
        g_free(debug);
    }
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
}
/* "unknown-type" signal handler: warns about the undecodable caps and
 * routes the pad to a shmdata writer anyway. */
void Uridecodebin::unknown_type_cb(GstElement* bin,
                                   GstPad* pad,
                                   GstCaps* caps,
                                   gpointer user_data) {
  Uridecodebin* context = static_cast<Uridecodebin*>(user_data);

  /* Fix: gst_caps_to_string() and gst_element_get_name() both return
   * allocated strings; the original leaked them inside the g_warning call. */
  gchar* caps_str = gst_caps_to_string(caps);
  gchar* bin_name = gst_element_get_name(bin);
  g_warning("Uridecodebin unknown type: %s (%s)\n", caps_str, bin_name);
  g_free(caps_str);
  g_free(bin_name);

  context->pad_to_shmdata_writer(context->gst_pipeline_->get_pipeline(), pad);
}
/* Component instance initialization and parameter handling. */
/* NOTE(review): MPF framework boilerplate — the implicit int return type
 * and the `component` symbol are presumably supplied by the surrounding
 * MPF component macros; confirm against the framework headers. */
component_setup()
{
	// Cache the tunable parameters for this instance.
	mpf_private.sustain = mpf_param_get_int("sustain");
	mpf_private.preset = mpf_param_get_int("preset");
	mpf_private.threshold = mpf_param_get_int("threshold");

	// Initialize logging, tagged with this element's name.
	mpf_private.loglevel = mpf_param_get_int("loglevel");
	mpf_logger_init(&mpf_private.logger, mpf_private.loglevel,
	    gst_element_get_name(GST_ELEMENT(component)));
}
/* Component instance initialization and parameter handling. */
/* NOTE(review): MPF framework boilerplate — the implicit int return type
 * and the `component` symbol are presumably supplied by the surrounding
 * MPF component macros; confirm against the framework headers. */
component_setup()
{
	// Initialize logging, tagged with this element's name.
	mpf_private.loglevel = mpf_param_get_int("loglevel");
	mpf_logger_init(&mpf_private.logger, mpf_private.loglevel,
	    gst_element_get_name(GST_ELEMENT(component)));

	// Intercept events on the "input" pad so pad_event_handler sees them.
	// NOTE(review): gst_element_get_pad() returns a ref that is not
	// released here — confirm whether the framework expects this.
	GstPad *input = gst_element_get_pad(GST_ELEMENT(
	    mpf_component_get_curcomponent()), "input");
	gst_pad_set_event_function(input, pad_event_handler);

	// Optional debug flag for the component.
	if (mpf_param_get_int("mpf-debug"))
		mpf_component_get_curcomponent()->flags = MPF_DEBUG;
}
/* Drains all pending bus messages: flags `end` on missing-plugin, ERROR or
 * EOS; parses (and discards) state-changed and stream-status details. */
void GStreamerBaseFrameSourceImpl::handleGStreamerMessages()
{
    GstMessage* msg  = NULL;
    GError *err = NULL;
    gchar *debug = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;

    if (!bus)
        return;

    while (gst_bus_have_pending(bus))
    {
        msg = gst_bus_pop(bus);

        if (gst_is_missing_plugin_message(msg))
        {
            printf("GStreamer: your gstreamer installation is missing a required plugin!\n");
            end = true;
        }
        else
        {
            switch (GST_MESSAGE_TYPE(msg))
            {
                case GST_MESSAGE_STATE_CHANGED:
                    GstState oldstate, newstate, pendstate;
                    gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                    break;
                case GST_MESSAGE_ERROR:
                {
                    gst_message_parse_error(msg, &err, &debug);
                    // The unique_ptr (with a g_free-ing deleter) owns the
                    // name string returned by gst_element_get_name().
                    std::unique_ptr<char[], GlibDeleter> name(gst_element_get_name(GST_MESSAGE_SRC (msg)));

                    printf("GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                           name.get(), err->message);

                    g_error_free(err);
                    g_free(debug);

                    end = true;
                    break;
                }
                case GST_MESSAGE_EOS:
                    end = true;
                    break;
                case GST_MESSAGE_STREAM_STATUS:
                    gst_message_parse_stream_status(msg,&tp,&elem);
                    break;
                default:
                    break;
            }
        }
        // Every popped message carries a ref that must be released.
        gst_message_unref(msg);
    }
}
/* ZRTP status signal handler: prints the reporting element's name together
 * with the decoded info/warning/severe/error code. */
static void
zrtp_statusInfo (GstElement *element, gint severity, gint subCode, gpointer data)
{
    /* Fix: gst_element_get_name() returns an allocated string; the
     * original leaked one copy per branch. Fetch it once and free it. */
    gchar *name = gst_element_get_name (element);

    switch (severity) {
    case zrtp_Info:
        g_print ("ZRTP status info message - %s: %s\n", name,
            InfoCodes[subCode]);
        break;

    case zrtp_Warning:
        g_print ("ZRTP status warning message - %s: %s\n", name,
            WarningCodes[subCode]);
        break;

    case zrtp_Severe:
        g_print ("ZRTP status severe message - %s: %s\n", name,
            SevereCodes[subCode]);
        break;

    case zrtp_ZrtpError:
        g_print ("ZRTP Error: severity - %s: %d, subcode: %x\n", name,
            severity, subCode * -1);
        break;

    default:
        /* Unknown severity: nothing to print (matches original). */
        break;
    }

    g_free (name);
}
/* QoS bus message handler: prints the source element's rendered/dropped
 * counters from the message's QoS stats. */
static void
qos_cb (GstBus * bus, GstMessage * msg, APP_STATE_T * state)
{
  GstFormat stats_format = GST_FORMAT_BUFFERS;
  gchar *src_name;

  gst_message_parse_qos_stats (msg, &stats_format, &state->rendered,
      &state->dropped);
  src_name = gst_element_get_name (GST_MESSAGE_SRC (msg));

  g_print ("%s rendered: %" G_GUINT64_FORMAT " dropped: %" G_GUINT64_FORMAT
      " %s\n", src_name, state->rendered, state->dropped,
      (stats_format == GST_FORMAT_BUFFERS ? "frames" : "samples"));

  g_free (src_name);
}
/* Finds and instantiates an image encoder element able to produce @caps.
 * Returns NULL and sets @err (if non-NULL) when no suitable factory
 * exists. */
static GstElement *
get_encoder (const GstCaps * caps, GError ** err)
{
  GList *encoders = NULL;
  GList *filtered = NULL;
  GstElementFactory *factory = NULL;
  GstElement *encoder = NULL;

  encoders =
      gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_ENCODER |
      GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE, GST_RANK_NONE);

  if (encoders == NULL) {
    /* Fix: g_set_error() tolerates a NULL @err, where the original's
     * direct "*err =" dereference would have crashed. */
    g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN,
        "Cannot find any image encoder");
    goto fail;
  }

  GST_INFO ("got factory list %p", encoders);
  gst_plugin_feature_list_debug (encoders);

  filtered =
      gst_element_factory_list_filter (encoders, caps, GST_PAD_SRC, FALSE);
  GST_INFO ("got filtered list %p", filtered);

  if (filtered == NULL) {
    gchar *tmp = gst_caps_to_string (caps);
    g_set_error (err, GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN,
        "Cannot find any image encoder for caps %s", tmp);
    g_free (tmp);
    goto fail;
  }

  gst_plugin_feature_list_debug (filtered);

  factory = (GstElementFactory *) filtered->data;
  GST_INFO ("got factory %p", factory);
  encoder = gst_element_factory_create (factory, NULL);

  if (encoder != NULL) {
    /* Fix: free the name string gst_element_get_name() allocates (the
     * original leaked it, and would have called it on NULL on failure). */
    gchar *name = gst_element_get_name (encoder);
    GST_INFO ("created encoder element %p, %s", encoder, name);
    g_free (name);
  }

fail:
  /* Shared exit path: also reached on success to free the factory lists. */
  if (encoders)
    gst_plugin_feature_list_free (encoders);
  if (filtered)
    gst_plugin_feature_list_free (filtered);

  return encoder;
}
/* GstRTSPMediaFactory get_element vfunc: builds a bin containing a video
 * and/or audio payloader, as enabled on the factory.  Returns NULL when
 * neither payloader could be created. */
static GstElement *
gst_rtsp_cam_media_factory_get_element (GstRTSPMediaFactory *media_factory,
    const GstRTSPUrl *url)
{
  GstElement *video_payloader = NULL;
  GstElement *audio_payloader = NULL;
  GstElement *bin = NULL;
  gint payloader_number = 0;
  GstRTSPCamMediaFactory *factory = GST_RTSP_CAM_MEDIA_FACTORY (media_factory);

  (void) url; /* unused */

  bin = gst_bin_new (NULL);

  if (factory->video) {
    video_payloader = create_video_payloader (factory, bin, payloader_number);
    if (video_payloader) {
      /* Fix: free the name string gst_element_get_name() allocates (the
       * original leaked it). */
      gchar *name = gst_element_get_name (video_payloader);
      GST_INFO_OBJECT (factory, "created video payloader %s", name);
      g_free (name);
      payloader_number += 1;
    }
  }

  if (factory->audio) {
    audio_payloader = create_audio_payloader (factory, bin, payloader_number);
    if (audio_payloader) {
      gchar *name = gst_element_get_name (audio_payloader);
      GST_INFO_OBJECT (factory, "created audio payloader %s", name);
      g_free (name);
    }
  }

  if (!video_payloader && !audio_payloader) {
    GST_ERROR_OBJECT (factory, "no audio and no video");
    gst_object_unref (bin);
    return NULL;
  }

  return bin;
}
/* "element-added" handler for autovideosink: disables double-buffering on
 * the concrete sink when it exposes a "double-buffer" property. */
static void
autovideosink_element_added_cb (GstElement *autovideosink,
    GstElement *element, GbpPlayer *player)
{
  GObjectClass *klass;
  gchar *name;

  /* Fix: gst_element_get_name() returns an allocated string the original
   * leaked. */
  name = gst_element_get_name (element);
  GST_INFO_OBJECT (player, "using sink %s", name);
  g_free (name);

  klass = G_OBJECT_GET_CLASS (element);
  if (!g_object_class_find_property (klass, "double-buffer"))
    return;

  g_object_set (G_OBJECT (element), "double-buffer", FALSE, NULL);
}