/* free the buffer */
g_free (buf->memory);
buf->memory = NULL;

return TRUE;
}

/* Start the ringbuffer.  Actual data flow is driven by the JACK process
 * callback; here we only ask the JACK transport to roll when this sink is
 * configured as the transport master. */
static gboolean
gst_jack_ring_buffer_start (GstAudioRingBuffer * buf)
{
  GstJackAudioSink *sink;

  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (sink, "start");

  if (sink->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (sink->client);
    jack_transport_start (client);
  }

  return TRUE;
}

/* Pause the ringbuffer: stop the JACK transport when we are the master. */
static gboolean
gst_jack_ring_buffer_pause (GstAudioRingBuffer * buf)
{
  GstJackAudioSink *sink;

  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (sink, "pause");

  if (sink->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (sink->client);
    jack_transport_stop (client);
  }

  return TRUE;
}

/* Stop the ringbuffer: same transport handling as pause. */
static gboolean
gst_jack_ring_buffer_stop (GstAudioRingBuffer * buf)
{
  GstJackAudioSink *sink;

  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (sink, "stop");

  if (sink->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (sink->client);
    jack_transport_stop (client);
  }

  return TRUE;
}

/* Report the playback delay in frames: the worst-case (maximum) playback
 * latency over all of our ports. */
#if defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)
static guint
gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
  GstJackAudioSink *sink;
  guint i, res = 0;
  jack_latency_range_t range;

  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));

  for (i = 0; i < sink->port_count; i++) {
    jack_port_get_latency_range (sink->ports[i], JackPlaybackLatency, &range);
    if (range.max > res)
      res = range.max;
  }

  GST_LOG_OBJECT (sink, "delay %u", res);

  return res;
}
#else /* !(defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)) */
/* Fallback for old JACK versions: use the deprecated total-latency API. */
static guint
gst_jack_ring_buffer_delay (GstAudioRingBuffer * buf)
{
  GstJackAudioSink *sink;
  guint i, res = 0;
  guint latency;
  jack_client_t *client;

  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));
  client = gst_jack_audio_client_get_client (sink->client);

  for (i = 0; i < sink->port_count; i++) {
    latency = jack_port_get_total_latency (client, sink->ports[i]);
    if (latency > res)
      res = latency;
  }

  GST_LOG_OBJECT (sink, "delay %u", res);

  return res;
}
/* Signal handler for the appsink "new-sample" signal: pulls the sample,
 * counts it in a lazily-created per-appsink counter and, once enough
 * samples were received, schedules pipeline termination from the main loop.
 *
 * appsink: the emitting appsink element
 * data:    unused user data
 *
 * Returns: GST_FLOW_OK so the pipeline keeps running.
 */
static GstFlowReturn
appsink_handle_many (GstElement * appsink, gpointer data)
{
  int *count = g_object_get_data (G_OBJECT (appsink), COUNT_KEY);
  GstSample *sample = NULL;

  if (count == NULL) {
    /* first sample for this appsink: attach a zeroed counter, freed with
     * the object */
    count = g_malloc0 (sizeof (int));
    g_object_set_data_full (G_OBJECT (appsink), COUNT_KEY, count, g_free);
  }

  g_signal_emit_by_name (appsink, "pull-sample", &sample);

  /* "pull-sample" can legitimately return NULL (e.g. on EOS or flushing);
   * unreffing NULL would raise a GLib critical. */
  if (sample != NULL)
    gst_sample_unref (sample);

  /* g_atomic_int_add returns the previous value, so this fires once, on
   * the 41st sample */
  if (g_atomic_int_add (count, 1) == 40) {
    GST_DEBUG_OBJECT (appsink, "Terminating");
    g_idle_add (check_pipeline_termination, GST_OBJECT_PARENT (appsink));
  }

  return GST_FLOW_OK;
}
/* Blocking pad probe used while tearing a branch down: marks the pad as
 * "unlinking" (so later probe hits drop buffers but still pass queries),
 * detaches it from its peer and hands the element over to the removal
 * thread pool. */
static GstPadProbeReturn
remove_on_unlinked_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer elem)
{
  KmsAgnosticBin2 *agnosticbin;
  GstPad *peer;
  gboolean already_unlinking;

  if (elem == NULL)
    return GST_PAD_PROBE_REMOVE;

  GST_DEBUG_OBJECT (pad, "Unlinking pad");

  GST_OBJECT_LOCK (pad);
  already_unlinking =
      g_object_get_qdata (G_OBJECT (pad), unlinking_data_quark ()) != NULL;

  if (already_unlinking) {
    GST_OBJECT_UNLOCK (pad);

    /* Queries must be answered */
    if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_QUERY_BOTH)
      return GST_PAD_PROBE_PASS;

    return GST_PAD_PROBE_DROP;
  }

  g_object_set_qdata (G_OBJECT (pad), unlinking_data_quark (),
      GINT_TO_POINTER (TRUE));
  GST_OBJECT_UNLOCK (pad);

  peer = gst_pad_get_peer (pad);
  if (peer != NULL) {
    gst_pad_unlink (pad, peer);
    g_object_unref (peer);
  }

  agnosticbin = KMS_AGNOSTIC_BIN2 (GST_OBJECT_PARENT (elem));
  g_thread_pool_push (agnosticbin->priv->remove_pool, g_object_ref (elem),
      NULL);

  return GST_PAD_PROBE_PASS;
}
/* Probe on the input bin's src pad waiting for the CAPS event: records the
 * negotiated caps, inserts the bin into the agnosticbin and re-links the
 * already-connected source pads.  Removes itself once the caps were seen.
 *
 * Fix: the address-of operator in gst_event_parse_caps() had been mangled
 * into the mojibake "¤t_caps" (an HTML-entity-corrupted "&current_caps"),
 * which does not compile; restored to &current_caps. */
static GstPadProbeReturn
input_bin_src_caps_probe (GstPad * pad, GstPadProbeInfo * info, gpointer bin)
{
  KmsAgnosticBin2 *self = KMS_AGNOSTIC_BIN2 (GST_OBJECT_PARENT (bin));
  GstEvent *event = gst_pad_probe_info_get_event (info);
  GstCaps *current_caps;

  if (self == NULL) {
    GST_WARNING_OBJECT (bin, "Parent agnosticbin seems to be released");
    return GST_PAD_PROBE_OK;
  }

  GST_TRACE_OBJECT (self, "Event in parser pad: %" GST_PTR_FORMAT, event);

  if (GST_EVENT_TYPE (event) != GST_EVENT_CAPS) {
    return GST_PAD_PROBE_OK;
  }

  KMS_AGNOSTIC_BIN2_LOCK (self);
  self->priv->started = TRUE;

  /* replace any previously stored input caps */
  if (self->priv->input_bin_src_caps != NULL) {
    gst_caps_unref (self->priv->input_bin_src_caps);
  }

  gst_event_parse_caps (event, &current_caps);
  self->priv->input_bin_src_caps = gst_caps_copy (current_caps);
  kms_agnostic_bin2_insert_bin (self, GST_BIN (bin));

  GST_INFO_OBJECT (self, "Setting current caps to: %" GST_PTR_FORMAT,
      current_caps);

  kms_element_for_each_src_pad (GST_ELEMENT (self),
      (KmsPadIterationAction) add_linked_pads, self);

  KMS_AGNOSTIC_BIN2_UNLOCK (self);

  return GST_PAD_PROBE_REMOVE;
}
/* Sink pad setcaps handler (GStreamer 0.10): updates the sink and src caps,
 * resets the decoder, then flushes any NEWSEGMENT event that was delayed
 * while the src pad was still unlinked (see gst_vaapidecode_sink_event). */
static gboolean gst_vaapidecode_set_caps(GstPad *pad, GstCaps *caps)
{
    GstVaapiDecode * const decode = GST_VAAPIDECODE(GST_OBJECT_PARENT(pad));

    g_return_val_if_fail(pad == decode->sinkpad, FALSE);

    if (!gst_vaapidecode_update_sink_caps(decode, caps))
        return FALSE;
    if (!gst_vaapidecode_update_src_caps(decode, caps))
        return FALSE;
    if (!gst_vaapidecode_reset(decode, decode->sinkpad_caps))
        return FALSE;

    /* Propagate NEWSEGMENT event downstream, now that pads are linked */
    if (decode->delayed_new_seg) {
        /* NOTE(review): gst_pad_push_event() takes ownership of the event;
           the extra unref on success presumably balances the extra ref taken
           when the event was delayed — verify the refcounting against
           gst_vaapidecode_sink_event(). */
        if (gst_pad_push_event(decode->srcpad, decode->delayed_new_seg))
            gst_event_unref(decode->delayed_new_seg);
        decode->delayed_new_seg = NULL;
    }
    return TRUE;
}
/* JACK sample-rate callback.  Once the ringbuffer is configured we cannot
 * follow a server rate change, so any change away from the negotiated rate
 * is reported as a fatal element error.  Returns 0 on success, 1 when the
 * new rate is unacceptable (we error out). */
static int
jack_sample_rate_cb (jack_nframes_t nframes, void *arg)
{
  GstJackRingBuffer *ringbuffer = GST_JACK_RING_BUFFER_CAST (arg);
  GstJackAudioSink *sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (arg));

  /* -1 means "not configured yet": any rate is fine then */
  if (ringbuffer->sample_rate == -1 || ringbuffer->sample_rate == nframes)
    return 0;

  GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
      ("Jack changed the sample rate, which is not supported"));
  return 1;
}
/* free the buffer */
gst_buffer_unref (buf->data);
buf->data = NULL;

return TRUE;
}

/* Start the ringbuffer.  Capture itself is driven by the JACK process
 * callback; here we only start the JACK transport when this source is
 * configured as the transport master. */
static gboolean
gst_jack_ring_buffer_start (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (src, "start");

  if (src->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (src->client);
    jack_transport_start (client);
  }

  return TRUE;
}

/* Pause the ringbuffer: stop the JACK transport when we are the master. */
static gboolean
gst_jack_ring_buffer_pause (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (src, "pause");

  if (src->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (src->client);
    jack_transport_stop (client);
  }

  return TRUE;
}

/* Stop the ringbuffer: same transport handling as pause. */
static gboolean
gst_jack_ring_buffer_stop (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (src, "stop");

  if (src->transport & GST_JACK_TRANSPORT_MASTER) {
    jack_client_t *client;

    client = gst_jack_audio_client_get_client (src->client);
    jack_transport_stop (client);
  }

  return TRUE;
}

/* Report the capture delay in frames: the worst-case (maximum) capture
 * latency over all of our ports. */
#if defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)
static guint
gst_jack_ring_buffer_delay (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;
  guint i, res = 0;
  jack_latency_range_t range;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));

  for (i = 0; i < src->port_count; i++) {
    jack_port_get_latency_range (src->ports[i], JackCaptureLatency, &range);
    if (range.max > res)
      res = range.max;
  }

  GST_DEBUG_OBJECT (src, "delay %u", res);

  return res;
}
#else /* !(defined (HAVE_JACK_0_120_1) || defined(HAVE_JACK_1_9_7)) */
/* Fallback for old JACK versions: use the deprecated total-latency API. */
static guint
gst_jack_ring_buffer_delay (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;
  guint i, res = 0;
  guint latency;
  jack_client_t *client;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));
  client = gst_jack_audio_client_get_client (src->client);

  for (i = 0; i < src->port_count; i++) {
    latency = jack_port_get_total_latency (client, src->ports[i]);
    if (latency > res)
      res = latency;
  }

  GST_DEBUG_OBJECT (src, "delay %u", res);

  return res;
}
/* JACK buffer-size callback.  A running ringbuffer cannot adapt to a new
 * server period size, so any change away from the configured size is
 * reported as a fatal element error.  Returns 0 on success, 1 when the new
 * size is unacceptable (we error out). */
static int
jack_buffer_size_cb (jack_nframes_t nframes, void *arg)
{
  GstJackRingBuffer *ringbuffer = GST_JACK_RING_BUFFER_CAST (arg);
  GstJackAudioSrc *src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (arg));

  /* -1 means "not configured yet": any size is fine then */
  if (ringbuffer->buffer_size == -1 || ringbuffer->buffer_size == nframes)
    return 0;

  GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
      ("Jack changed the buffer size, which is not supported"));
  return 1;
}
/* the _open_device method should make a connection with the server.
 *
 * Creates the JACK client for this source element, wiring up the shutdown,
 * process, buffer-size and sample-rate callbacks.  Returns TRUE on success;
 * on failure posts an element error describing whether the server itself
 * was unreachable or the client could not be opened. */
static gboolean
gst_jack_ring_buffer_open_device (GstRingBuffer * buf)
{
  GstJackAudioSrc *src;
  jack_status_t status = 0;
  const gchar *name;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));

  GST_DEBUG_OBJECT (src, "open");

  /* use the application name as the JACK client name, with a generic
   * fallback */
  name = g_get_application_name ();
  if (!name)
    name = "GStreamer";

  src->client = gst_jack_audio_client_new (name, src->server,
      GST_JACK_CLIENT_SOURCE,
      jack_shutdown_cb,
      jack_process_cb, jack_buffer_size_cb, jack_sample_rate_cb, buf, &status);
  if (src->client == NULL)
    goto could_not_open;

  GST_DEBUG_OBJECT (src, "opened");

  return TRUE;

  /* ERRORS */
could_not_open:
  {
    if (status & JackServerFailed) {
      GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
          ("Cannot connect to the Jack server (status %d)", status));
    } else {
      /* this element captures (reads) from the device, so report OPEN_READ;
       * the previous OPEN_READ/OPEN_WRITE mixup was copy-pasted from the
       * sink element */
      GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
          ("Jack client open error (status %d)", status));
    }
    return FALSE;
  }
}
/* chain function
 * this function does the actual processing: detects faces with a Haar
 * cascade on a grayscale copy of the frame and blurs each detected region
 * in place, then pushes the same buffer downstream. */
static GstFlowReturn
gst_faceblur_chain (GstPad * pad, GstBuffer * buf)
{
  Gstfaceblur *filter;
  CvSeq *faces;
  int i;

  filter = GST_FACEBLUR (GST_OBJECT_PARENT (pad));

  /* point the pre-created IplImage header at the buffer data (no copy) */
  filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
  cvCvtColor (filter->cvImage, filter->cvGray, CV_RGB2GRAY);
  cvClearMemStorage (filter->cvStorage);

  if (filter->cvCascade) {
    faces =
        cvHaarDetectObjects (filter->cvGray, filter->cvCascade,
        filter->cvStorage, 1.1, 2, 0, cvSize (30, 30)
        /* NOTE(review): this version test is false for e.g. OpenCV 3.0
           (major >= 2 but minor < 2) — confirm the intended range */
#if (CV_MAJOR_VERSION >= 2) && (CV_MINOR_VERSION >= 2)
        , cvSize (32, 32)
#endif
        );

    /* only take the copy-on-write hit when something will be drawn */
    if (faces && faces->total > 0) {
      buf = gst_buffer_make_writable (buf);
    }
    for (i = 0; i < (faces ? faces->total : 0); i++) {
      CvRect *r = (CvRect *) cvGetSeqElem (faces, i);

      /* blur only the face rectangle by restricting the image ROI */
      cvSetImageROI (filter->cvImage, *r);
      cvSmooth (filter->cvImage, filter->cvImage, CV_BLUR, 11, 11, 0, 0);
      cvSmooth (filter->cvImage, filter->cvImage, CV_GAUSSIAN, 11, 11, 0, 0);
      cvResetImageROI (filter->cvImage);
    }
  }

  /* these filters operate in place, so we push the same buffer */
  return gst_pad_push (filter->srcpad, buf);
}
/* Src ghost pad query function: runs the default handler first and, when an
 * ACCEPT_CAPS query came back negative, tears down the current target pad
 * and reprocesses the ghost pad so a new branch can be negotiated.
 *
 * Fix: gst_proxy_pad_get_internal() can return NULL, but the old code
 * called g_object_unref (gp) unconditionally, which raises a GLib critical
 * warning on NULL. */
static gboolean
proxy_src_pad_query_function (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  gboolean ret = gst_pad_query_default (pad, parent, query);

  if (!ret) {
    return ret;
  }

  if (GST_QUERY_TYPE (query) == GST_QUERY_ACCEPT_CAPS) {
    gboolean accepted;

    gst_query_parse_accept_caps_result (query, &accepted);

    if (!accepted) {
      GstProxyPad *gp = gst_proxy_pad_get_internal (GST_PROXY_PAD (pad));
      KmsAgnosticBin2 *self = NULL;

      GST_ERROR_OBJECT (pad, "Caps not accepted: %" GST_PTR_FORMAT, query);

      if (gp) {
        self = KMS_AGNOSTIC_BIN2 (GST_OBJECT_PARENT (gp));
      }

      if (self) {
        KMS_AGNOSTIC_BIN2_LOCK (self);
        remove_target_pad (GST_PAD_CAST (gp));
        kms_agnostic_bin2_process_pad (self, GST_PAD_CAST (gp));
        KMS_AGNOSTIC_BIN2_UNLOCK (self);
      }

      /* only drop the ref we actually own */
      if (gp) {
        g_object_unref (gp);
      }
    }
  }

  return ret;
}
/* deep-notify handler: logs "element-path/property = value" for every
 * property change in the bin, serializing the value according to its type.
 * Unreadable properties are reported as such. */
void _owr_deep_notify(GObject *object, GstObject *orig, GParamSpec *pspec,
    gpointer user_data)
{
    GValue propval = G_VALUE_INIT;
    gchar *valstr = NULL;
    GstObject *ancestor;
    gchar *old_path, *path;

    OWR_UNUSED(user_data);
    OWR_UNUSED(object);

    /* build "grandparent/parent/origin/" by walking up the object tree */
    path = g_strdup("");
    ancestor = orig;
    while (GST_IS_OBJECT(ancestor)) {
        old_path = path;
        path = g_strjoin("/", GST_OBJECT_NAME(ancestor), old_path, NULL);
        g_free(old_path);
        ancestor = GST_OBJECT_PARENT(ancestor);
    }

    if (!(pspec->flags & G_PARAM_READABLE)) {
        GST_INFO_OBJECT(object, "Parameter %s not readable in %s.",
            pspec->name, path);
        g_free(path);
        return;
    }

    g_value_init(&propval, pspec->value_type);
    g_object_get_property(G_OBJECT(orig), pspec->name, &propval);

    /* pick the friendliest serialization for the value's type */
    if (G_VALUE_TYPE(&propval) == GST_TYPE_CAPS)
        valstr = gst_caps_to_string(gst_value_get_caps(&propval));
    else if (G_VALUE_HOLDS_STRING(&propval))
        valstr = g_value_dup_string(&propval);
    else
        valstr = gst_value_serialize(&propval);

    GST_INFO_OBJECT(object, "%s%s = %s\n", path, pspec->name, valstr);

    g_free(valstr);
    g_value_unset(&propval);
    g_free(path);
}
/* GObject property setter for GstTee.  All fields are written while holding
 * the tee's object lock. */
static void
gst_tee_set_property (GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec)
{
  GstTee *tee = GST_TEE (object);

  GST_OBJECT_LOCK (tee);
  switch (prop_id) {
    case PROP_HAS_CHAIN:
      tee->has_chain = g_value_get_boolean (value);
      break;
    case PROP_SILENT:
      tee->silent = g_value_get_boolean (value);
      break;
    case PROP_PULL_MODE:
      tee->pull_mode = (GstTeePullMode) g_value_get_enum (value);
      break;
    case PROP_ALLOC_PAD:
    {
      GstPad *pad = g_value_get_object (value);

      /* only accept a pad that actually belongs to this tee; the pad's lock
       * is taken (nested inside the tee lock) to check parentage.
       * NOTE(review): the pad is stored without taking an extra ref —
       * presumably kept alive by its parent relationship; verify. */
      GST_OBJECT_LOCK (pad);
      if (GST_OBJECT_PARENT (pad) == GST_OBJECT_CAST (object))
        tee->allocpad = pad;
      else
        GST_WARNING_OBJECT (object, "Tried to set alloc pad %s which"
            " is not my pad", GST_OBJECT_NAME (pad));
      GST_OBJECT_UNLOCK (pad);
      break;
    }
    case PROP_ALLOW_NOT_LINKED:
      tee->allow_not_linked = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (tee);
}
/* Close the audio device via the subclass close vmethod.
 *
 * Fix: the error label was named `could_not_open` (copy-pasted from the
 * open path) and produced no diagnostic; renamed to `could_not_close` and
 * a debug message added, matching the sink-side counterpart. */
static gboolean
gst_audioringbuffer_close_device (GstRingBuffer * buf)
{
  GstAudioSrc *src;
  GstAudioSrcClass *csrc;
  gboolean result = TRUE;

  src = GST_AUDIO_SRC (GST_OBJECT_PARENT (buf));
  csrc = GST_AUDIO_SRC_GET_CLASS (src);

  /* a missing close vmethod counts as success */
  if (csrc->close)
    result = csrc->close (src);

  if (!result)
    goto could_not_close;

  return result;

  /* ERRORS */
could_not_close:
  {
    GST_DEBUG_OBJECT (src, "could not close device");
    return FALSE;
  }
}
/* Setcaps handler: derives the shout2 stream format from the mime type of
 * the first caps structure.  Returns FALSE for formats libshout cannot
 * stream. */
static gboolean
gst_shout2send_setcaps (GstPad * pad, GstCaps * caps)
{
  GstShout2send *shout2send = GST_SHOUT2SEND (GST_OBJECT_PARENT (pad));
  const gchar *mimetype =
      gst_structure_get_name (gst_caps_get_structure (caps, 0));

  GST_DEBUG_OBJECT (shout2send, "mimetype of caps given is: %s", mimetype);

  if (strcmp (mimetype, "audio/mpeg") == 0) {
    shout2send->audio_format = SHOUT_FORMAT_MP3;
    return TRUE;
  }

  if (strcmp (mimetype, "application/ogg") == 0) {
    shout2send->audio_format = SHOUT_FORMAT_VORBIS;
    return TRUE;
  }

  return FALSE;
}
/* CoreAudio property listener: when the device reports a hardware change,
 * re-check SPDIF availability and post a fatal element error if SPDIF
 * output has gone away.  Always returns noErr. */
static OSStatus
_audio_stream_hardware_changed_listener (AudioObjectID inObjectID,
    UInt32 inNumberAddresses, const AudioObjectPropertyAddress inAddresses[],
    void *inClientData)
{
  GstCoreAudio *core_audio = inClientData;
  guint idx;

  for (idx = 0; idx < inNumberAddresses; idx++) {
    if (inAddresses[idx].mSelector != kAudioDevicePropertyDeviceHasChanged)
      continue;

    if (!gst_core_audio_audio_device_is_spdif_avail (core_audio->device_id)) {
      GstOsxAudioSink *sink =
          GST_OSX_AUDIO_SINK (GST_OBJECT_PARENT (core_audio->osxbuf));

      GST_ELEMENT_ERROR (sink, RESOURCE, FAILED,
          ("SPDIF output no longer available"),
          ("Audio device is reporting that SPDIF output isn't available"));
    }
    break;
  }

  return noErr;
}
/* Stop capturing: unblocks any thread stuck inside the subclass by calling
 * its reset vmethod.  Always reports success. */
static gboolean
gst_audioringbuffer_stop (GstRingBuffer * buf)
{
  GstAudioSrc *src;
  GstAudioSrcClass *csrc;

  src = GST_AUDIO_SRC (GST_OBJECT_PARENT (buf));
  csrc = GST_AUDIO_SRC_GET_CLASS (src);

  /* unblock any pending writes to the audio device */
  if (csrc->reset) {
    GST_DEBUG ("reset...");
    csrc->reset (src);
    GST_DEBUG ("reset done");
  }
  /* disabled: waiting for the reader thread here was apparently abandoned */
#if 0
  GST_DEBUG ("stop, waiting...");
  GST_AUDIORING_BUFFER_WAIT (buf);
  GST_DEBUG ("stoped");
#endif

  return TRUE;
}
/* Sink pad event handler: intercepts crop events to record the top/left
 * crop offsets on the element; every other event is delegated to the parent
 * class handler. */
static gboolean
pad_event (GstPad *pad, GstEvent *event)
{
  GstOmxBaseVfpc2 *self = GST_OMX_BASE_VFPC2 (GST_OBJECT_PARENT (pad));
  GstOmxBaseFilter2 *omx_base = GST_OMX_BASE_FILTER2 (self);

  GST_INFO_OBJECT (self, "begin: event=%s", GST_EVENT_TYPE_NAME (event));

  if (GST_EVENT_TYPE (event) == GST_EVENT_CROP) {
    /* only the top/left offsets are of interest; width/height are ignored */
    gst_event_parse_crop (event, &self->top, &self->left, NULL, NULL);
    return TRUE;
  }

  return parent_class->pad_event (pad, event);
}
/* Viewfinder src pad getcaps: returns the camera's viewfinder caps with the
 * current pool orientation stamped into every structure, or a copy of the
 * pad template caps when the camera parameters are not available yet.
 *
 * Fix: the loop used `int x` against `uint len` (signed/unsigned
 * comparison, and `uint` is a non-standard typedef); both are now guint,
 * matching gst_caps_get_size()'s return type. */
static GstCaps *
gst_droid_cam_src_vfsrc_getcaps (GstPad * pad)
{
  GstDroidCamSrc *src = GST_DROID_CAM_SRC (GST_OBJECT_PARENT (pad));
  GstCaps *caps = NULL;

  GST_DEBUG_OBJECT (src, "vfsrc getcaps");

  GST_OBJECT_LOCK (src);

  if (src->camera_params) {
    guint x;
    guint len;

    caps = camera_params_get_viewfinder_caps (src->camera_params);
    len = gst_caps_get_size (caps);

    /* stamp the current sensor orientation into each structure */
    GST_CAMERA_BUFFER_POOL_LOCK (src->pool);
    for (x = 0; x < len; x++) {
      GstStructure *s = gst_caps_get_structure (caps, x);
      gst_structure_set (s, "orientation-angle", G_TYPE_INT,
          src->pool->orientation, NULL);
    }
    GST_CAMERA_BUFFER_POOL_UNLOCK (src->pool);
  } else {
    caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
  }

  GST_OBJECT_UNLOCK (src);

  GST_LOG_OBJECT (src, "returning %" GST_PTR_FORMAT, caps);

  return caps;
}
/* Sink pad event handler: every event is forwarded downstream; a flush-stop
 * additionally resets the ogg sync/stream state and marks a discontinuity. */
static gboolean
gst_ogg_avi_parse_event (GstPad * pad, GstEvent * event)
{
  GstOggAviParse *ogg = GST_OGG_AVI_PARSE (GST_OBJECT_PARENT (pad));

  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    /* drop all buffered ogg state before resuming */
    ogg_sync_reset (&ogg->sync);
    ogg_stream_reset (&ogg->stream);
    ogg->discont = TRUE;
  }

  return gst_pad_push_event (ogg->srcpad, event);
}
/* Chain function: ensures every buffer leaving the element carries the
 * negotiated output caps (parset->outcaps), first unwrapping buffers that
 * this element wrapped earlier. */
static GstFlowReturn
rsn_parsetter_chain (GstPad * pad, GstBuffer * buf)
{
  RsnParSetter *parset = RSN_PARSETTER (GST_OBJECT_PARENT (pad));

  /* If this is a buffer we wrapped up earlier, unwrap it now */
  if (RSN_IS_WRAPPEDBUFFER (buf)) {
    RsnWrappedBuffer *wrap_buf = RSN_WRAPPEDBUFFER (buf);
    if (wrap_buf->owner == GST_ELEMENT (parset)) {
      buf = rsn_wrappedbuffer_unwrap_and_unref (wrap_buf);
      GST_DEBUG_OBJECT (parset,
          "Unwrapping %p yields buffer %p with caps %" GST_PTR_FORMAT,
          wrap_buf, buf, GST_BUFFER_CAPS (buf));
    }
  }

  /* cheap pointer comparison first; only act when the caps object differs */
  if (parset->outcaps != GST_BUFFER_CAPS (buf)) {
    /* equal-by-value caps: just adopt the buffer's caps object instead of
     * touching the buffer */
    if (parset->override_outcaps == FALSE &&
        gst_caps_is_equal (parset->outcaps, GST_BUFFER_CAPS (buf))) {
      /* Just update our output caps var */
      gst_caps_replace (&parset->outcaps, GST_BUFFER_CAPS (buf));
      goto out;
    }

    /* Replace the caps on the output buffer */
    buf = gst_buffer_make_metadata_writable (buf);
    gst_buffer_set_caps (buf, parset->outcaps);

    GST_DEBUG_OBJECT (parset,
        "Replacing caps on buffer %p with caps %" GST_PTR_FORMAT,
        buf, parset->outcaps);
  }

out:
  return gst_pad_push (parset->srcpad, buf);
}
/* Sink pad event handler (GStreamer 0.10): delays a NEWSEGMENT event while
 * the src pad is still unlinked so it can be pushed later from
 * gst_vaapidecode_set_caps(); all other events go straight downstream. */
static gboolean
gst_vaapidecode_sink_event(GstPad *pad, GstEvent *event)
{
    GstVaapiDecode * const decode = GST_VAAPIDECODE(GST_OBJECT_PARENT(pad));

    GST_DEBUG("handle sink event '%s'", GST_EVENT_TYPE_NAME(event));

    /* Propagate event downstream */
    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_NEWSEGMENT:
        /* a newer segment supersedes any previously delayed one */
        if (decode->delayed_new_seg) {
            gst_event_unref(decode->delayed_new_seg);
            decode->delayed_new_seg = NULL;
        }
        if (!GST_PAD_PEER(decode->srcpad)) {
            /* NOTE(review): an extra ref is taken here, presumably balanced
               by the post-push unref in gst_vaapidecode_set_caps() — verify
               the refcounting on the failed-push path. */
            decode->delayed_new_seg = gst_event_ref(event);
            return TRUE;
        }
        break;
    default:
        break;
    }
    return gst_pad_push_event(decode->srcpad, event);
}
/* Test helper: switches the global recorder to the requested state and then
 * plugs an unrelated live source/sink pair into the same pipeline to verify
 * that extra elements do not disturb the recorder's timestamps. */
static void
change_state (KmsUriEndpointState state)
{
  GstElement *extra_src;
  GstElement *extra_sink;

  GST_DEBUG ("Setting recorder to state %s", state2string (state));
  g_object_set (G_OBJECT (recorder), "state", state, NULL);

  extra_src = gst_element_factory_make ("videotestsrc", NULL);
  extra_sink = gst_element_factory_make ("fakesink", NULL);

  g_object_set (extra_sink, "async", FALSE, "sync", FALSE, NULL);
  g_object_set (extra_src, "is-live", TRUE, NULL);

  GST_DEBUG_OBJECT (recorder, "Adding more elements");
  gst_bin_add_many (GST_BIN (GST_OBJECT_PARENT (recorder)), extra_src,
      extra_sink, NULL);
  gst_element_link (extra_src, extra_sink);

  /* bring the new elements up to the pipeline's state, sink first */
  gst_element_sync_state_with_parent (extra_sink);
  gst_element_sync_state_with_parent (extra_src);
}
/* Close the audio device through the subclass close vmethod.  A missing
 * vmethod counts as success; a failing one is logged and reported. */
static gboolean
gst_audioringbuffer_close_device (GstRingBuffer * buf)
{
  GstAudioSink *sink = GST_AUDIO_SINK (GST_OBJECT_PARENT (buf));
  GstAudioSinkClass *csink = GST_AUDIO_SINK_GET_CLASS (sink);
  gboolean closed = TRUE;

  if (csink->close != NULL)
    closed = csink->close (sink);

  if (!closed) {
    GST_DEBUG_OBJECT (sink, "could not close device");
    return FALSE;
  }

  return closed;
}
/* Open the audio device through the subclass open vmethod.  A missing
 * vmethod counts as success; a failing one is logged and reported. */
static gboolean
gst_audio_sink_ring_buffer_open_device (GstAudioRingBuffer * buf)
{
  GstAudioSink *sink = GST_AUDIO_SINK (GST_OBJECT_PARENT (buf));
  GstAudioSinkClass *csink = GST_AUDIO_SINK_GET_CLASS (sink);
  gboolean opened = TRUE;

  if (csink->open != NULL)
    opened = csink->open (sink);

  if (!opened) {
    GST_DEBUG_OBJECT (sink, "could not open device");
    return FALSE;
  }

  return opened;
}
/* Task function driving PNG decoding in pull mode: reads the stream info,
 * negotiates caps, decodes the single picture into a freshly allocated
 * downstream buffer, pushes it and then EOSes (a PNG stream carries exactly
 * one image).  On any error the task pauses and, for fatal reasons, posts
 * an element error plus EOS. */
static void
gst_pngdec_task (GstPad * pad)
{
  GstPngDec *pngdec;
  GstBuffer *buffer = NULL;
  size_t buffer_size = 0;
  gint i = 0;
  png_bytep *rows, inp;
  png_uint_32 rowbytes;
  GstFlowReturn ret = GST_FLOW_OK;

  pngdec = GST_PNGDEC (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (pngdec, "read frame");

  /* Let libpng come back here on error */
  /* NOTE(review): if libpng longjmps back here after `rows`/`buffer` were
     allocated below, those allocations appear to be leaked on the way to
     `pause` — verify. */
  if (setjmp (png_jmpbuf (pngdec->png))) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }

  /* Set reading callback */
  png_set_read_fn (pngdec->png, pngdec, user_read_data);

  /* Read info */
  png_read_info (pngdec->png, pngdec->info);

  /* Generate the caps and configure */
  ret = gst_pngdec_caps_create_and_set (pngdec);
  if (ret != GST_FLOW_OK) {
    goto pause;
  }

  /* Allocate output buffer */
  rowbytes = png_get_rowbytes (pngdec->png, pngdec->info);
  /* guard against 32-bit overflow in the round-up and the multiply below */
  if (rowbytes > (G_MAXUINT32 - 3) || pngdec->height > G_MAXUINT32 / rowbytes) {
    ret = GST_FLOW_ERROR;
    goto pause;
  }
  rowbytes = GST_ROUND_UP_4 (rowbytes);
  buffer_size = pngdec->height * rowbytes;
  ret =
      gst_pad_alloc_buffer_and_set_caps (pngdec->srcpad,
      GST_BUFFER_OFFSET_NONE, buffer_size, GST_PAD_CAPS (pngdec->srcpad),
      &buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* build a row-pointer table pointing into the output buffer (rows are
     kept 4-byte aligned via the rounded-up stride) */
  rows = (png_bytep *) g_malloc (sizeof (png_bytep) * pngdec->height);

  inp = GST_BUFFER_DATA (buffer);

  for (i = 0; i < pngdec->height; i++) {
    rows[i] = inp;
    inp += rowbytes;
  }

  /* Read the actual picture */
  png_read_image (pngdec->png, rows);
  g_free (rows);

  /* Push the raw RGB frame */
  ret = gst_pad_push (pngdec->srcpad, buffer);
  if (ret != GST_FLOW_OK)
    goto pause;

  /* And we are done */
  gst_pad_pause_task (pngdec->sinkpad);
  gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
  return;

pause:
  {
    GST_INFO_OBJECT (pngdec, "pausing task, reason %s",
        gst_flow_get_name (ret));
    gst_pad_pause_task (pngdec->sinkpad);
    if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED) {
      GST_ELEMENT_ERROR (pngdec, STREAM, FAILED,
          (_("Internal data stream error.")),
          ("stream stopped, reason %s", gst_flow_get_name (ret)));
      gst_pad_push_event (pngdec->srcpad, gst_event_new_eos ());
    }
  }
}
/* chain function
 * this function does the actual processing: runs the motion-cells detector
 * (implemented on the C++ side) over the frame in place, posts element
 * messages for init/save errors and for motion begin / ongoing / finished /
 * no-motion conditions, then pushes the same buffer downstream. */
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{
  GstMotioncells *filter;

  filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
  if (filter->calculate_motion) {
    double sensitivity;
    int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
        motioncells_count, i;
    int thickness, success, motioncellsidxcnt, numberOfCells,
        motioncellsnumber, cellsOfInterestNumber;
    int mincellsOfInterestNumber, motiondetect;
    char *datafile;
    bool display, changed_datafile, useAlpha;
    gint64 starttime;
    motionmaskcoordrect *motionmaskcoords;
    motioncellidx *motionmaskcellsidx;
    cellscolor motioncellscolor;
    motioncellidx *motioncellsidx;

    g_mutex_lock (filter->propset_mutex);
    buf = gst_buffer_make_writable (buf);
    filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
    if (filter->firstframe) {
      setPrevFrame (filter->cvImage, filter->id);
      filter->firstframe = FALSE;
    }

    /* snapshot element properties while the property lock is held */
    sensitivity = filter->sensitivity;
    framerate = filter->framerate;
    gridx = filter->gridx;
    gridy = filter->gridy;
    display = filter->display;
    motionmaskcoord_count = filter->motionmaskcoord_count;
    motionmaskcoords =
        g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count);
    for (i = 0; i < filter->motionmaskcoord_count; i++) {
      //we need divide 2 because we use gauss pyramid in C++ side
      motionmaskcoords[i].upper_left_x =
          filter->motionmaskcoords[i].upper_left_x / 2;
      motionmaskcoords[i].upper_left_y =
          filter->motionmaskcoords[i].upper_left_y / 2;
      motionmaskcoords[i].lower_right_x =
          filter->motionmaskcoords[i].lower_right_x / 2;
      motionmaskcoords[i].lower_right_y =
          filter->motionmaskcoords[i].lower_right_y / 2;
    }
    motioncellscolor.R_channel_value =
        filter->motioncellscolor->R_channel_value;
    motioncellscolor.G_channel_value =
        filter->motioncellscolor->G_channel_value;
    motioncellscolor.B_channel_value =
        filter->motioncellscolor->B_channel_value;

    /* grid layout or start time changed: roll over to a new data file and
       rebuild cell/mask descriptions */
    if ((filter->changed_gridx || filter->changed_gridy
            || filter->changed_startime)) {
      if ((g_strcmp0 (filter->cur_datafile, NULL) != 0)) {
        GFREE (filter->cur_datafile);
        filter->datafileidx++;
        filter->cur_datafile =
            g_strdup_printf ("%s-%d.%s", filter->basename_datafile,
            filter->datafileidx, filter->datafile_extension);
        filter->changed_datafile = TRUE;
        motion_cells_free_resources (filter->id);
      }
      if (filter->motioncells_count > 0)
        gst_motioncells_update_motion_cells (filter);
      if (filter->motionmaskcells_count > 0)
        gst_motioncells_update_motion_masks (filter);
      filter->changed_gridx = FALSE;
      filter->changed_gridy = FALSE;
      filter->changed_startime = FALSE;
    }

    datafile = g_strdup (filter->cur_datafile);
    filter->cur_buff_timestamp = (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    filter->starttime +=
        (filter->cur_buff_timestamp - filter->prev_buff_timestamp);
    starttime = filter->starttime;
    if (filter->changed_datafile || filter->diff_timestamp < 0)
      filter->diff_timestamp =
          (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
    changed_datafile = filter->changed_datafile;
    motionmaskcells_count = filter->motionmaskcells_count;
    motionmaskcellsidx = g_new0 (motioncellidx, filter->motionmaskcells_count);
    for (i = 0; i < filter->motionmaskcells_count; i++) {
      motionmaskcellsidx[i].lineidx = filter->motionmaskcellsidx[i].lineidx;
      motionmaskcellsidx[i].columnidx =
          filter->motionmaskcellsidx[i].columnidx;
    }
    motioncells_count = filter->motioncells_count;
    motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count);
    for (i = 0; i < filter->motioncells_count; i++) {
      motioncellsidx[i].lineidx = filter->motioncellsidx[i].lineidx;
      motioncellsidx[i].columnidx = filter->motioncellsidx[i].columnidx;
    }
    useAlpha = filter->usealpha;
    thickness = filter->thickness;

    /* run the detector */
    success =
        perform_detection_motion_cells (filter->cvImage, sensitivity,
        framerate, gridx, gridy,
        (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND) -
        filter->diff_timestamp, display, useAlpha, motionmaskcoord_count,
        motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
        motioncellscolor, motioncells_count, motioncellsidx, starttime,
        datafile, changed_datafile, thickness, filter->id);

    /* success == 1: the data file could not be initialised (post once) */
    if ((success == 1) && (filter->sent_init_error_msg == false)) {
      char *initfailedreason;
      int initerrorcode;
      GstStructure *s;
      GstMessage *m;

      initfailedreason = getInitDataFileFailed (filter->id);
      initerrorcode = getInitErrorCode (filter->id);
      s = gst_structure_new ("motion", "init_error_code", G_TYPE_INT,
          initerrorcode, "details", G_TYPE_STRING, initfailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_init_error_msg = TRUE;
    }
    /* success == -1: the data file could not be saved (post once) */
    if ((success == -1) && (filter->sent_save_error_msg == false)) {
      char *savefailedreason;
      int saveerrorcode;
      GstStructure *s;
      GstMessage *m;

      savefailedreason = getSaveDataFileFailed (filter->id);
      saveerrorcode = getSaveErrorCode (filter->id);
      s = gst_structure_new ("motion", "save_error_code", G_TYPE_INT,
          saveerrorcode, "details", G_TYPE_STRING, savefailedreason, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);
      gst_element_post_message (GST_ELEMENT (filter), m);
      filter->sent_save_error_msg = TRUE;
    }
    if (success == -2) {        //frame dropped
      filter->prev_buff_timestamp = filter->cur_buff_timestamp;
      //free
      GFREE (datafile);
      GFREE (motionmaskcoords);
      GFREE (motionmaskcellsidx);
      GFREE (motioncellsidx);
      g_mutex_unlock (filter->propset_mutex);
      return gst_pad_push (filter->srcpad, buf);
    }

    filter->changed_datafile = getChangedDataFile (filter->id);
    motioncellsidxcnt = getMotionCellsIdxCnt (filter->id);
    numberOfCells = filter->gridx * filter->gridy;
    motioncellsnumber = motioncellsidxcnt / MSGLEN;
    cellsOfInterestNumber = (filter->motioncells_count > 0) ?   //how many cells interest for us
        (filter->motioncells_count) : (numberOfCells);
    mincellsOfInterestNumber =
        floor ((double) cellsOfInterestNumber * filter->threshold);
    motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
    if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
      char *detectedmotioncells;

      filter->last_motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
      detectedmotioncells = getMotionCellsIdx (filter->id);
      if (detectedmotioncells) {
        filter->consecutive_motion++;
        /* motion begins only after the configured number of consecutive
           motion frames */
        if ((filter->previous_motion == false)
            && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
          GstStructure *s;
          GstMessage *m;

          filter->previous_motion = true;
          filter->motion_begin_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion_begin",
              G_TYPE_UINT64, filter->motion_begin_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        } else if (filter->postallmotion) {
          GstStructure *s;
          GstMessage *m;

          filter->motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "motion_cells_indices",
              G_TYPE_STRING, detectedmotioncells, "motion", G_TYPE_UINT64,
              filter->motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      } else {
        GstStructure *s;
        GstMessage *m;

        s = gst_structure_new ("motion", "motion_cells_indices",
            G_TYPE_STRING, "error", NULL);
        m = gst_message_new_element (GST_OBJECT (filter), s);
        gst_element_post_message (GST_ELEMENT (filter), m);
      }
    } else {
      filter->consecutive_motion = 0;
      /* no motion on this frame: report "motion finished" after the
         configured gap (in seconds) since the last motion */
      if ((((GST_BUFFER_TIMESTAMP (buf) -
                      filter->last_motion_timestamp) / 1000000000l) >=
              filter->gap)
          && (filter->last_motion_timestamp > 0)) {
        GST_DEBUG ("POST MOTION FINISHED MSG\n");
        if (filter->previous_motion) {
          GstStructure *s;
          GstMessage *m;

          filter->previous_motion = false;
          s = gst_structure_new ("motion", "motion_finished", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    /* optional periodic "no motion" notification */
    if (filter->postnomotion > 0) {
      guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
      if ((last_buf_timestamp -
              (filter->last_motion_timestamp / 1000000000l)) >=
          filter->postnomotion) {
        GST_DEBUG ("POST NO MOTION MSG\n");
        if ((last_buf_timestamp -
                (filter->last_nomotion_notified / 1000000000l)) >=
            filter->postnomotion) {
          GstStructure *s;
          GstMessage *m;

          filter->last_nomotion_notified = GST_BUFFER_TIMESTAMP (buf);
          s = gst_structure_new ("motion", "no_motion", G_TYPE_UINT64,
              filter->last_motion_timestamp, NULL);
          m = gst_message_new_element (GST_OBJECT (filter), s);
          gst_element_post_message (GST_ELEMENT (filter), m);
        }
      }
    }
    filter->prev_buff_timestamp = filter->cur_buff_timestamp;
    //free
    GFREE (datafile);
    GFREE (motionmaskcoords);
    GFREE (motionmaskcellsidx);
    GFREE (motioncellsidx);
    g_mutex_unlock (filter->propset_mutex);
  }
  /* the filter operates in place, so we push the same buffer */
  return gst_pad_push (filter->srcpad, buf);
}
/* allocate a buffer and setup resources to process the audio samples of
 * the format as specified in @spec.
 *
 * We allocate N jack ports, one for each channel. If we are asked to
 * automatically make a connection with physical ports, we connect as many
 * ports as there are physical ports, leaving leftover ports unconnected.
 *
 * It is assumed that samplerate and number of channels are acceptable since our
 * getcaps method will always provide correct values. If unacceptable caps are
 * received for some reason, we fail here.
 *
 * Fix: removed a stray g_print() that wrote connection progress straight to
 * stdout; the GST_DEBUG_OBJECT line just above already logs the same
 * information through the logging system. */
static gboolean
gst_jack_ring_buffer_acquire (GstRingBuffer * buf, GstRingBufferSpec * spec)
{
  GstJackAudioSrc *src;
  GstJackRingBuffer *abuf;
  const char **ports;
  gint sample_rate, buffer_size;
  gint i, channels, res;
  jack_client_t *client;

  src = GST_JACK_AUDIO_SRC (GST_OBJECT_PARENT (buf));
  abuf = GST_JACK_RING_BUFFER_CAST (buf);

  GST_DEBUG_OBJECT (src, "acquire");

  client = gst_jack_audio_client_get_client (src->client);

  /* sample rate must be that of the server */
  sample_rate = jack_get_sample_rate (client);
  if (sample_rate != spec->rate)
    goto wrong_samplerate;

  channels = spec->channels;

  if (!gst_jack_audio_src_allocate_channels (src, channels))
    goto out_of_ports;

  buffer_size = jack_get_buffer_size (client);

  /* the segment size in bytes, this is large enough to hold a buffer of 32bit floats
   * for all channels  */
  spec->segsize = buffer_size * sizeof (gfloat) * channels;
  spec->latency_time = gst_util_uint64_scale (spec->segsize,
      (GST_SECOND / GST_USECOND), spec->rate * spec->bytes_per_sample);
  /* segtotal based on buffer-time latency */
  spec->segtotal = spec->buffer_time / spec->latency_time;

  GST_DEBUG_OBJECT (src, "segsize %d, segtotal %d", spec->segsize,
      spec->segtotal);

  /* allocate the ringbuffer memory now */
  buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
  memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));

  if ((res = gst_jack_audio_client_set_active (src->client, TRUE)))
    goto could_not_activate;

  /* if we need to automatically connect the ports, do so now. We must do this
   * after activating the client. */
  if (src->connect == GST_JACK_CONNECT_AUTO) {
    /* find all the physical output ports. A physical output port is a port
     * associated with a hardware device. Someone needs connect to a physical
     * port in order to capture something. */
    ports = jack_get_ports (client, NULL, NULL,
        JackPortIsPhysical | JackPortIsOutput);
    if (ports == NULL) {
      /* no ports? fine then we don't do anything except for posting a warning
       * message. */
      GST_ELEMENT_WARNING (src, RESOURCE, NOT_FOUND, (NULL),
          ("No physical output ports found, leaving ports unconnected"));
      goto done;
    }

    for (i = 0; i < channels; i++) {
      /* stop when all output ports are exhausted */
      if (ports[i] == NULL) {
        /* post a warning that we could not connect all ports */
        GST_ELEMENT_WARNING (src, RESOURCE, NOT_FOUND, (NULL),
            ("No more physical ports, leaving some ports unconnected"));
        break;
      }
      GST_DEBUG_OBJECT (src, "try connecting to %s",
          jack_port_name (src->ports[i]));
      /* connect the physical port to a port; an already existing connection
       * (EEXIST) is not an error */
      res = jack_connect (client, ports[i], jack_port_name (src->ports[i]));
      if (res != 0 && res != EEXIST)
        goto cannot_connect;
    }
    free (ports);
  }

done:
  abuf->sample_rate = sample_rate;
  abuf->buffer_size = buffer_size;
  abuf->channels = spec->channels;

  return TRUE;

  /* ERRORS */
wrong_samplerate:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("Wrong samplerate, server is running at %d and we received %d",
            sample_rate, spec->rate));
    return FALSE;
  }
out_of_ports:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("Cannot allocate more Jack ports"));
    return FALSE;
  }
could_not_activate:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("Could not activate client (%d:%s)", res, g_strerror (res)));
    return FALSE;
  }
cannot_connect:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
        ("Could not connect input ports to physical ports (%d:%s)", res,
            g_strerror (res)));
    free (ports);
    return FALSE;
  }
}
/* this is the callback of jack. This should RT-safe.
 *
 * Deinterleaves one segment of samples from the ringbuffer into the per-port
 * jack buffers, or writes silence when no segment is ready. Returns 0 on
 * success, 1 when the segment size does not match the jack period. */
static int
jack_process_cb (jack_nframes_t nframes, void *arg)
{
  GstJackAudioSink *sink;
  GstRingBuffer *buf;
  gint readseg, len;
  guint8 *readptr;
  gint frame, chan, flen, channels;
  sample_t *samples;

  buf = GST_RING_BUFFER_CAST (arg);
  sink = GST_JACK_AUDIO_SINK (GST_OBJECT_PARENT (buf));

  channels = buf->spec.channels;

  /* fetch the jack target buffer for every port up front */
  for (chan = 0; chan < channels; chan++) {
    sink->buffers[chan] =
        (sample_t *) jack_port_get_buffer (sink->ports[chan], nframes);
  }

  if (!gst_ring_buffer_prepare_read (buf, &readseg, &readptr, &len)) {
    GST_DEBUG_OBJECT (sink, "write %d frames silence", nframes);
    /* We are not allowed to read from the ringbuffer, write silence to all
     * jack output buffers */
    for (chan = 0; chan < channels; chan++) {
      memset (sink->buffers[chan], 0, nframes * sizeof (sample_t));
    }
    return 0;
  }

  flen = len / channels;

  /* the number of samples must be exactly the segment size */
  if (nframes * sizeof (sample_t) != flen)
    goto wrong_size;

  GST_DEBUG_OBJECT (sink, "copy %d frames: %p, %d bytes, %d channels",
      nframes, readptr, flen, channels);

  samples = (sample_t *) readptr;

  /* the samples in the ringbuffer have the channels interleaved, we need to
   * deinterleave into the jack target buffers */
  for (frame = 0; frame < nframes; frame++) {
    for (chan = 0; chan < channels; chan++) {
      sink->buffers[chan][frame] = *samples++;
    }
  }

  /* clear written samples in the ringbuffer */
  gst_ring_buffer_clear (buf, readseg);

  /* we wrote one segment */
  gst_ring_buffer_advance (buf, 1);

  return 0;

  /* ERRORS */
wrong_size:
  {
    GST_ERROR_OBJECT (sink, "nbytes (%d) != flen (%d)",
        (gint) (nframes * sizeof (sample_t)), flen);
    return 1;
  }
}
/* Parse the vorbis-in-AVI codec_data from @caps, configure the source pad
 * caps to audio/x-vorbis and push the three vorbis header packets.
 *
 * The codec_data layout is: 22 bytes of waveformatex tail (bits_per_sample,
 * channel_mask, GUID), then 3 little-endian guint32 header sizes, then the
 * raw bytes of the 3 vorbis headers.
 *
 * Returns TRUE when the headers were parsed and pushed, FALSE on malformed
 * or missing codec_data. codec_data is external input, so every length is
 * validated before use. */
static gboolean
gst_ogg_avi_parse_setcaps (GstPad * pad, GstCaps * caps)
{
  GstOggAviParse *ogg;
  GstStructure *structure;
  const GValue *codec_data;
  GstBuffer *buffer;
  GstMapInfo map;
  guint8 *ptr;
  gsize left;
  guint32 sizes[3];
  GstCaps *outcaps;
  gint i, offs;

  ogg = GST_OGG_AVI_PARSE (GST_OBJECT_PARENT (pad));

  structure = gst_caps_get_structure (caps, 0);

  /* take codec data */
  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data == NULL)
    goto no_data;

  /* only buffers are valid */
  if (G_VALUE_TYPE (codec_data) != GST_TYPE_BUFFER)
    goto wrong_format;

  /* Now parse the data */
  buffer = gst_value_get_buffer (codec_data);

  /* first 22 bytes are bits_per_sample, channel_mask, GUID
   * Then we get 3 LE guint32 with the 3 header sizes
   * then we get the bytes of the 3 headers. */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  ptr = map.data;
  left = map.size;

  GST_LOG_OBJECT (ogg, "configuring codec_data of size %" G_GSIZE_FORMAT, left);

  /* we need the 22 header bytes plus 12 bytes of packet sizes; check before
   * subtracting, otherwise the unsigned gsize counter underflows on short
   * buffers and the later checks pass on garbage */
  if (left < 22 + 12)
    goto buffer_too_small;

  /* skip headers */
  ptr += 22;
  left -= 22;

  /* read sizes of the 3 headers */
  sizes[0] = GST_READ_UINT32_LE (ptr);
  sizes[1] = GST_READ_UINT32_LE (ptr + 4);
  sizes[2] = GST_READ_UINT32_LE (ptr + 8);

  GST_DEBUG_OBJECT (ogg, "header sizes: %u %u %u", sizes[0], sizes[1],
      sizes[2]);

  left -= 12;

  /* and we need at least enough data for all the headers; compare stepwise
   * so the sum of the three untrusted 32-bit sizes cannot wrap around and
   * defeat the check */
  if (sizes[0] > left || sizes[1] > left - sizes[0]
      || sizes[2] > left - sizes[0] - sizes[1])
    goto buffer_too_small;

  /* set caps */
  outcaps = gst_caps_new_empty_simple ("audio/x-vorbis");
  gst_pad_set_caps (ogg->srcpad, outcaps);
  gst_caps_unref (outcaps);

  /* copy header data; 34 = 22 skipped bytes + 12 bytes of sizes */
  offs = 34;
  for (i = 0; i < 3; i++) {
    GstBuffer *out;

    /* now output the raw vorbis header packets */
    out = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offs, sizes[i]);
    gst_pad_push (ogg->srcpad, out);

    offs += sizes[i];
  }
  gst_buffer_unmap (buffer, &map);

  return TRUE;

  /* ERRORS */
no_data:
  {
    GST_DEBUG_OBJECT (ogg, "no codec_data found in caps");
    return FALSE;
  }
wrong_format:
  {
    GST_DEBUG_OBJECT (ogg, "codec_data is not a buffer");
    return FALSE;
  }
buffer_too_small:
  {
    GST_DEBUG_OBJECT (ogg, "codec_data is too small");
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
}