/* Rebuilds identity->last_message with a human-readable description of @buf
 * (size, dts/pts/duration, offsets, flags) and fires the notify signal.
 * Takes the object lock only around the string swap; the notify happens
 * unlocked. @size is passed in by the caller (buffer size in bytes).
 *
 * Fixes vs. previous revision: the offset_end conversion contained a stray
 * space flag ("% " G_GINT64_FORMAT), and both offsets are guint64 so they
 * are now printed with G_GUINT64_FORMAT. */
static void
gst_identity_update_last_message_for_buffer (GstIdentity * identity,
    const gchar * action, GstBuffer * buf, gsize size)
{
  gchar dts_str[64], pts_str[64], dur_str[64];
  gchar *flag_str;

  GST_OBJECT_LOCK (identity);

  flag_str = gst_buffer_get_flags_string (buf);

  g_free (identity->last_message);
  identity->last_message =
      g_strdup_printf ("%s ******* (%s:%s) "
      "(%" G_GSIZE_FORMAT " bytes, dts: %s, pts:%s, duration: %s, offset: %"
      G_GUINT64_FORMAT ", " "offset_end: %" G_GUINT64_FORMAT
      ", flags: %08x %s) %p",
      action, GST_DEBUG_PAD_NAME (GST_BASE_TRANSFORM_CAST (identity)->sinkpad),
      size, print_pretty_time (dts_str, sizeof (dts_str), GST_BUFFER_DTS (buf)),
      print_pretty_time (pts_str, sizeof (pts_str), GST_BUFFER_PTS (buf)),
      print_pretty_time (dur_str, sizeof (dur_str), GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
      GST_BUFFER_FLAGS (buf), flag_str, buf);
  g_free (flag_str);

  GST_OBJECT_UNLOCK (identity);

  gst_identity_notify_last_message (identity);
}
/* Posts a "GstVideoDetect" element message on the bus, reporting whether
 * the detection pattern is currently present plus the decoded payload
 * (@data) and the buffer's various clock times. */
static void
gst_video_detect_post_message (GstVideoDetect * videodetect,
    GstBuffer * buffer, guint64 data)
{
  GstBaseTransform *base = GST_BASE_TRANSFORM_CAST (videodetect);
  GstMessage *msg;
  guint64 ts, dur, rtime, stime;

  /* derive running/stream time from the buffer timestamp and the
   * current segment */
  ts = GST_BUFFER_TIMESTAMP (buffer);
  dur = GST_BUFFER_DURATION (buffer);
  rtime = gst_segment_to_running_time (&base->segment, GST_FORMAT_TIME, ts);
  stime = gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, ts);

  /* "data" is clamped to what fits a plain uint for legacy consumers;
   * "data-uint64" carries the full value */
  msg = gst_message_new_element (GST_OBJECT_CAST (videodetect),
      gst_structure_new ("GstVideoDetect",
          "have-pattern", G_TYPE_BOOLEAN, videodetect->in_pattern,
          "timestamp", G_TYPE_UINT64, ts,
          "stream-time", G_TYPE_UINT64, stime,
          "running-time", G_TYPE_UINT64, rtime,
          "duration", G_TYPE_UINT64, dur,
          "data-uint64", G_TYPE_UINT64, data,
          "data", G_TYPE_UINT, (guint) MIN (data, G_MAXINT), NULL));

  gst_element_post_message (GST_ELEMENT_CAST (videodetect), msg);
}
/* Posts a "GstVideoAnalyse" element message carrying the brightness
 * statistics computed for @buffer, along with its clock times. */
static void
gst_video_analyse_post_message (GstVideoAnalyse * videoanalyse,
    GstBuffer * buffer)
{
  GstBaseTransform *base = GST_BASE_TRANSFORM_CAST (videoanalyse);
  GstMessage *msg;
  guint64 ts, dur, rtime, stime;

  /* translate the buffer timestamp into running/stream time using the
   * current segment */
  ts = GST_BUFFER_TIMESTAMP (buffer);
  dur = GST_BUFFER_DURATION (buffer);
  rtime = gst_segment_to_running_time (&base->segment, GST_FORMAT_TIME, ts);
  stime = gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, ts);

  msg = gst_message_new_element (GST_OBJECT_CAST (videoanalyse),
      gst_structure_new ("GstVideoAnalyse",
          "timestamp", G_TYPE_UINT64, ts,
          "stream-time", G_TYPE_UINT64, stime,
          "running-time", G_TYPE_UINT64, rtime,
          "duration", G_TYPE_UINT64, dur,
          "brightness", G_TYPE_DOUBLE, videoanalyse->brightness,
          "brightness-variance", G_TYPE_DOUBLE, videoanalyse->brightness_var,
          NULL));

  gst_element_post_message (GST_ELEMENT_CAST (videoanalyse), msg);
}
/* GObject set_property handler for videoscale.
 * Each property is written under the object lock so the streaming thread
 * sees a consistent value; only add-borders forces a renegotiation of the
 * source pad, since it changes the output geometry. */
static void
gst_video_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoScale *vscale = GST_VIDEO_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      /* scaling algorithm (enum) */
      GST_OBJECT_LOCK (vscale);
      vscale->method = g_value_get_enum (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_ADD_BORDERS:
      GST_OBJECT_LOCK (vscale);
      vscale->add_borders = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (vscale);
      /* output size may change -> renegotiate downstream caps */
      gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (vscale));
      break;
    case PROP_SHARPNESS:
      GST_OBJECT_LOCK (vscale);
      vscale->sharpness = g_value_get_double (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_SHARPEN:
      GST_OBJECT_LOCK (vscale);
      vscale->sharpen = g_value_get_double (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_DITHER:
      GST_OBJECT_LOCK (vscale);
      vscale->dither = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_SUBMETHOD:
      GST_OBJECT_LOCK (vscale);
      vscale->submethod = g_value_get_int (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_ENVELOPE:
      GST_OBJECT_LOCK (vscale);
      vscale->envelope = g_value_get_double (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    case PROP_GAMMA_DECODE:
      GST_OBJECT_LOCK (vscale);
      vscale->gamma_decode = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (vscale);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Rebuilds identity->last_message with a human-readable description of @buf
 * and fires the notify signal. Buffer flags are rendered by scanning the
 * low 16 flag bits against a name table built into the stack buffer.
 *
 * Fixes vs. previous revision: the unnamed placeholder bits (0-3) used to
 * append a bare space when set — they are now skipped; the offset_end
 * conversion contained a stray space flag ("% " G_GINT64_FORMAT); offsets
 * are guint64 and are now printed with G_GUINT64_FORMAT. */
static void
gst_identity_update_last_message_for_buffer (GstIdentity * identity,
    const gchar * action, GstBuffer * buf, gsize size)
{
  gchar dts_str[64], pts_str[64], dur_str[64];
  /* 100 bytes is just enough for every name plus separators (~99 incl. NUL);
   * keep the table and this size in sync */
  gchar flag_str[100];

  GST_OBJECT_LOCK (identity);

  {
    const char *flag_list[] = {
      "", "", "", "", "live", "decode-only", "discont", "resync", "corrupted",
      "marker", "header", "gap", "droppable", "delta-unit", "tag-memory",
      "FIXME"
    };
    int i;
    char *end = flag_str;

    end[0] = '\0';
    for (i = 0; i < G_N_ELEMENTS (flag_list); i++) {
      /* bits 0-3 have no public name; don't emit separators for them */
      if (flag_list[i][0] == '\0')
        continue;
      if (GST_MINI_OBJECT_CAST (buf)->flags & (1 << i)) {
        strcpy (end, flag_list[i]);
        end += strlen (end);
        end[0] = ' ';
        end[1] = '\0';
        end++;
      }
    }
  }

  g_free (identity->last_message);
  identity->last_message =
      g_strdup_printf ("%s ******* (%s:%s) "
      "(%" G_GSIZE_FORMAT " bytes, dts: %s, pts:%s, duration: %s, offset: %"
      G_GUINT64_FORMAT ", " "offset_end: %" G_GUINT64_FORMAT
      ", flags: %08x %s) %p",
      action, GST_DEBUG_PAD_NAME (GST_BASE_TRANSFORM_CAST (identity)->sinkpad),
      size, print_pretty_time (dts_str, sizeof (dts_str), GST_BUFFER_DTS (buf)),
      print_pretty_time (pts_str, sizeof (pts_str), GST_BUFFER_PTS (buf)),
      print_pretty_time (dur_str, sizeof (dur_str), GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
      GST_BUFFER_FLAGS (buf), flag_str, buf);

  GST_OBJECT_UNLOCK (identity);

  gst_identity_notify_last_message (identity);
}
/* Instance initializer: seeds every property with its DEFAULT_* value and
 * marks the transform gap-aware so GAP-flagged buffers are passed through. */
static void
gst_identity_init (GstIdentity * identity)
{
  /* timing / throughput behaviour */
  identity->sleep_time = DEFAULT_SLEEP_TIME;
  identity->datarate = DEFAULT_DATARATE;
  identity->sync = DEFAULT_SYNC;
  identity->single_segment = DEFAULT_SINGLE_SEGMENT;

  /* fault injection */
  identity->error_after = DEFAULT_ERROR_AFTER;
  identity->drop_probability = DEFAULT_DROP_PROBABILITY;

  /* stream sanity checks */
  identity->check_imperfect_timestamp = DEFAULT_CHECK_IMPERFECT_TIMESTAMP;
  identity->check_imperfect_offset = DEFAULT_CHECK_IMPERFECT_OFFSET;

  /* reporting */
  identity->silent = DEFAULT_SILENT;
  identity->dump = DEFAULT_DUMP;
  identity->signal_handoffs = DEFAULT_SIGNAL_HANDOFFS;
  identity->last_message = NULL;

  gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM_CAST (identity), TRUE);
}
/* Instance initializer: clears all OpenCV image handles, resets the state
 * machine, and applies default property values. */
static void
gst_motiondetect_init (StbtMotionDetect * filter, StbtMotionDetectClass * gclass)
{
  /* start disabled, waiting for first frame */
  filter->enabled = FALSE;
  filter->state = MOTION_DETECT_STATE_INITIALISING;

  /* no images allocated yet */
  filter->cvCurrentImage = NULL;
  filter->cvReferenceImageGray = NULL;
  filter->cvCurrentImageGray = NULL;
  filter->cvMaskImage = NULL;
  filter->cvInvertedMaskImage = NULL;

  /* configuration defaults */
  filter->mask = NULL;
  filter->debugDirectory = NULL;
  filter->noiseThreshold = DEFAULT_NOISE_THRESHOLD;
  filter->display = TRUE;

  gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM_CAST (filter), TRUE);
}
/* Builds (but does not post) a "facedetect" element message for @buf,
 * carrying the buffer's timestamp, stream time, running time and duration.
 * Ownership of the returned message passes to the caller. */
static GstMessage *
gst_face_detect_message_new (GstFaceDetect * filter, GstBuffer * buf)
{
  GstBaseTransform *base = GST_BASE_TRANSFORM_CAST (filter);
  GstClockTime rtime, stime;
  GstStructure *structure;

  rtime = gst_segment_to_running_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buf));
  stime = gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buf));

  structure = gst_structure_new ("facedetect",
      "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP (buf),
      "stream-time", G_TYPE_UINT64, stime,
      "running-time", G_TYPE_UINT64, rtime,
      "duration", G_TYPE_UINT64, GST_BUFFER_DURATION (buf), NULL);

  return gst_message_new_element (GST_OBJECT (filter), structure);
}
/* Blocks until @running_time is reached on the pipeline clock when the
 * "sync" property is set and the segment is in TIME format.
 * Returns GST_FLOW_OK normally, or GST_FLOW_EOS if the clock wait was
 * unscheduled (i.e. someone called gst_clock_id_unschedule on our id,
 * typically during shutdown/flush). */
static GstFlowReturn
gst_identity_do_sync (GstIdentity * identity, GstClockTime running_time)
{
  GstFlowReturn ret = GST_FLOW_OK;

  if (identity->sync &&
      GST_BASE_TRANSFORM_CAST (identity)->segment.format == GST_FORMAT_TIME) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);

    /* park here while the element is blocked; the cond releases the
     * object lock while waiting */
    while (identity->blocked)
      g_cond_wait (&identity->blocked_cond, GST_OBJECT_GET_LOCK (identity));

    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      /* absolute clock time = running time + base time + extra latency */
      timestamp = running_time + GST_ELEMENT (identity)->base_time +
          identity->upstream_latency;

      /* save id if we need to unlock */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);

      /* must not hold the object lock across the blocking wait, so another
       * thread can unschedule clock_id to wake us up */
      GST_OBJECT_UNLOCK (identity);
      cret = gst_clock_id_wait (identity->clock_id, NULL);
      GST_OBJECT_LOCK (identity);

      /* clock_id may have been cleared by an unscheduling thread while we
       * were unlocked; only unref if it is still ours */
      if (identity->clock_id) {
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      if (cret == GST_CLOCK_UNSCHEDULED)
        ret = GST_FLOW_EOS;
    }
    GST_OBJECT_UNLOCK (identity);
  }

  return ret;
}
/* Rebuilds identity->last_message with a human-readable description of @buf
 * (size, timestamp, duration, offsets, flags) and fires the notify signal.
 * Takes the object lock only around the string swap.
 *
 * Fixes vs. previous revision: the message contained a stray "i" after the
 * pad name ("(%s:%s)i "), and the offset_end conversion had a stray space
 * flag ("% " G_GINT64_FORMAT). */
static void
gst_identity_update_last_message_for_buffer (GstIdentity * identity,
    const gchar * action, GstBuffer * buf)
{
  gchar ts_str[64], dur_str[64];

  GST_OBJECT_LOCK (identity);

  g_free (identity->last_message);
  identity->last_message =
      g_strdup_printf ("%s ******* (%s:%s) "
      "(%u bytes, timestamp: %s, duration: %s, offset: %" G_GINT64_FORMAT
      ", " "offset_end: %" G_GINT64_FORMAT ", flags: %d) %p",
      action, GST_DEBUG_PAD_NAME (GST_BASE_TRANSFORM_CAST (identity)->sinkpad),
      GST_BUFFER_SIZE (buf),
      print_pretty_time (ts_str, sizeof (ts_str), GST_BUFFER_TIMESTAMP (buf)),
      print_pretty_time (dur_str, sizeof (dur_str), GST_BUFFER_DURATION (buf)),
      GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
      GST_BUFFER_FLAGS (buf), buf);

  GST_OBJECT_UNLOCK (identity);

  gst_identity_notify_last_message (identity);
}
/* GObject set_property handler for videoscale. Property values are written
 * under the object lock; changing add-borders additionally triggers a caps
 * renegotiation since it alters the output geometry. */
static void
gst_video_scale_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoScale *videoscale = GST_VIDEO_SCALE (object);

  switch (prop_id) {
    case PROP_METHOD:
      /* scaling algorithm (enum) */
      GST_OBJECT_LOCK (videoscale);
      videoscale->method = g_value_get_enum (value);
      GST_OBJECT_UNLOCK (videoscale);
      break;
    case PROP_ADD_BORDERS:
      GST_OBJECT_LOCK (videoscale);
      videoscale->add_borders = g_value_get_boolean (value);
      GST_OBJECT_UNLOCK (videoscale);
      /* output size may change -> renegotiate */
      gst_base_transform_reconfigure (GST_BASE_TRANSFORM_CAST (videoscale));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Builds a "level" element message skeleton for the interval starting at
 * @timestamp with @duration. The rms/peak/decay fields are attached as
 * empty GValueArrays; the caller fills them per channel afterwards.
 * Ownership of the returned message passes to the caller. */
static GstMessage *
gst_level_message_new (GstLevel * level, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (level);
  GstStructure *s;
  GstClockTime endtime, running_time, stream_time;
  const gchar *fields[] = { "rms", "peak", "decay" };
  guint i;

  running_time = gst_segment_to_running_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  /* endtime is for backwards compatibility */
  endtime = stream_time + duration;

  s = gst_structure_new ("level",
      "endtime", GST_TYPE_CLOCK_TIME, endtime,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);

  /* attach an empty per-channel array for each statistic */
  for (i = 0; i < G_N_ELEMENTS (fields); i++) {
    GValue v = { 0, };

    g_value_init (&v, G_TYPE_VALUE_ARRAY);
    g_value_take_boxed (&v, g_value_array_new (0));
    gst_structure_take_value (s, fields[i], &v);
  }

  return gst_message_new_element (GST_OBJECT (level), s);
}
/* Flushes the filter's residue: the last `latency` worth of samples that
 * are still "inside" the filter after the final input buffer. Convolves
 * zero input to drain them, stamps the resulting buffer with the timestamps
 * and offsets saved while processing regular buffers, and pushes it on the
 * source pad. Resets buffer state; no-op if nothing was ever processed. */
void
gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
{
  GstBuffer *outbuf;
  GstFlowReturn res;
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  gint outsize, outsamples;
  GstMapInfo map;
  guint8 *in, *out;

  /* nothing to drain if we never negotiated or never saw input */
  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }

  /* Calculate the number of samples and their memory size that
   * should be pushed from the residue */
  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
  if (outsamples <= 0) {
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    return;
  }
  outsize = outsamples * channels * bps;

  if (!self->fft || self->low_latency) {
    /* time-domain (or low-latency) path */
    gint64 diffsize, diffsamples;

    /* Process the difference between latency and residue length samples
     * to start at the actual data instead of starting at the zeros before
     * when we only got one buffer smaller than latency */
    diffsamples =
        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
    if (diffsamples > 0) {
      diffsize = diffsamples * channels * bps;
      in = g_new0 (guint8, diffsize);
      out = g_new0 (guint8, diffsize);
      self->nsamples_out += self->process (self, in, out, diffsamples);
      g_free (in);
      g_free (out);
    }

    outbuf = gst_buffer_new_and_alloc (outsize);

    /* Convolve the residue with zeros to get the actual remaining data */
    in = g_new0 (guint8, outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
    self->nsamples_out += self->process (self, in, map.data, outsamples);
    gst_buffer_unmap (outbuf, &map);
    g_free (in);
  } else {
    /* FFT path: feed zero blocks until enough output samples are produced */
    guint gensamples = 0;

    outbuf = gst_buffer_new_and_alloc (outsize);
    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);

    while (gensamples < outsamples) {
      /* top up the current block with zeroes to force the FFT to flush */
      guint step_insamples = self->block_length - self->buffer_fill;
      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
      guint8 *out = g_new (guint8, self->block_length * channels * bps);
      guint step_gensamples;

      step_gensamples = self->process (self, zeroes, out, step_insamples);
      g_free (zeroes);

      /* NOTE(review): the destination offset and copy size here use
       * `gensamples * bps` / `... * bps` without the channels factor,
       * while outsize above includes channels — verify this is correct
       * for multi-channel streams (or whether bps already accounts for
       * all channels at this call site). */
      memcpy (map.data + gensamples * bps, out,
          MIN (step_gensamples, outsamples - gensamples) * bps);
      gensamples += MIN (step_gensamples, outsamples - gensamples);

      g_free (out);
    }
    self->nsamples_out += gensamples;

    gst_buffer_unmap (outbuf, &map);
  }

  /* Set timestamp, offset, etc from the values we
   * saved when processing the regular buffers */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
  else
    GST_BUFFER_TIMESTAMP (outbuf) = 0;
  GST_BUFFER_TIMESTAMP (outbuf) +=
      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
      self->latency, GST_SECOND, rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);

  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) =
        self->start_off + self->nsamples_out - outsamples - self->latency;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
  }

  GST_DEBUG_OBJECT (self, "Pushing residue buffer of size %" G_GSIZE_FORMAT
      " with timestamp: %" GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT
      ", offset: %" G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT
      ", nsamples_out: %d", gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
      GST_BUFFER_OFFSET (outbuf), GST_BUFFER_OFFSET_END (outbuf), outsamples);

  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);

  if (G_UNLIKELY (res != GST_FLOW_OK)) {
    GST_WARNING_OBJECT (self, "failed to push residue");
  }

  self->buffer_fill = 0;
}
/* Builds a "spectrum" element message for the interval starting at
 * @timestamp with @duration. In single-channel mode magnitude/phase go in
 * as flat GST_TYPE_LIST values; in multi-channel mode each enabled field
 * is a GST_TYPE_ARRAY with one sub-array per channel.
 * Ownership of the returned message passes to the caller.
 *
 * Fix vs. previous revision: the multi-channel phase array was filled from
 * cd->spect_magnitude instead of cd->spect_phase (copy-paste bug). */
static GstMessage *
gst_spectrum_message_new (GstSpectrum * spectrum, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (spectrum);
  GstSpectrumChannel *cd;
  GstStructure *s;
  GValue *mcv = NULL, *pcv = NULL;
  GstClockTime endtime, running_time, stream_time;

  GST_DEBUG_OBJECT (spectrum, "preparing message, bands =%d ",
      spectrum->bands);

  running_time = gst_segment_to_running_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment,
      GST_FORMAT_TIME, timestamp);
  /* endtime is for backwards compatibility */
  endtime = stream_time + duration;

  s = gst_structure_new ("spectrum",
      "endtime", GST_TYPE_CLOCK_TIME, endtime,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration, NULL);

  if (!spectrum->multi_channel) {
    cd = &spectrum->channel_data[0];

    if (spectrum->message_magnitude) {
      /* FIXME 0.11: this should be an array, not a list */
      mcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "magnitude");
      gst_spectrum_message_add_list (mcv, cd->spect_magnitude,
          spectrum->bands);
    }
    if (spectrum->message_phase) {
      /* FIXME 0.11: this should be an array, not a list */
      pcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "phase");
      gst_spectrum_message_add_list (pcv, cd->spect_phase, spectrum->bands);
    }
  } else {
    guint c;
    guint channels = GST_AUDIO_FILTER (spectrum)->format.channels;

    if (spectrum->message_magnitude) {
      mcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY,
          "magnitude");
    }
    if (spectrum->message_phase) {
      pcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "phase");
    }

    for (c = 0; c < channels; c++) {
      cd = &spectrum->channel_data[c];

      if (spectrum->message_magnitude) {
        gst_spectrum_message_add_array (mcv, cd->spect_magnitude,
            spectrum->bands);
      }
      if (spectrum->message_phase) {
        /* was cd->spect_magnitude — the phase array must carry phase data */
        gst_spectrum_message_add_array (pcv, cd->spect_phase,
            spectrum->bands);
      }
    }
  }
  return gst_message_new_element (GST_OBJECT (spectrum), s);
}