/**
 * gst_base_video_encoder_set_latency:
 * @base_video_encoder: the encoder instance
 * @min_latency: minimum latency introduced by the encoder
 * @max_latency: maximum latency; must be >= @min_latency
 *
 * Stores the encoder's latency range and posts a latency message on the
 * bus so that the pipeline recalculates its overall latency.
 */
void
gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder,
    GstClockTime min_latency, GstClockTime max_latency)
{
  /* GstClockTime is unsigned, so a "min_latency >= 0" precondition is
   * always true (and triggers -Wtype-limits); only the ordering between
   * min and max is a meaningful check. */
  g_return_if_fail (max_latency >= min_latency);

  base_video_encoder->min_latency = min_latency;
  base_video_encoder->max_latency = max_latency;

  /* Notify the pipeline that the latency configuration changed. */
  gst_element_post_message (GST_ELEMENT_CAST (base_video_encoder),
      gst_message_new_latency (GST_OBJECT_CAST (base_video_encoder)));
}
/**
 * owr_media_renderer_set_source:
 * @renderer: the renderer that should consume media
 * @source: the media source to attach
 *
 * Replaces the renderer's current source (if any) with @source, links the
 * source's src pad to the renderer's sink pad and posts a latency message
 * on the pipeline so latency is recalculated.
 *
 * Returns: TRUE on success, FALSE if the pads could not be linked.
 */
gboolean owr_media_renderer_set_source(OwrMediaRenderer *renderer, OwrMediaSource *source)
{
    OwrMediaRendererPrivate *priv;
    gboolean ret = TRUE;
    GstPad *srcpad, *sinkpad;
    GstCaps *caps;
    GstPadLinkReturn pad_link_return;

    g_assert(renderer);
    g_assert(source);

    priv = renderer->priv;

    /* Detach and drop any previously attached source first. */
    g_mutex_lock(&priv->media_renderer_lock);
    if (priv->source) {
        unlink_source(renderer);
        g_object_unref(priv->source);
        priv->source = NULL;
    }
    g_mutex_unlock(&priv->media_renderer_lock);
    /* FIXME - too much locking/unlocking of the same lock across private API? */

    /* Negotiate: ask the renderer subclass for its caps and request a
     * matching pad from the source. */
    sinkpad = _owr_media_renderer_get_pad(renderer);
    g_assert(sinkpad);
    caps = OWR_MEDIA_RENDERER_GET_CLASS(renderer)->get_caps(renderer);
    srcpad = _owr_media_source_get_pad(source, caps);
    gst_caps_unref(caps);
    g_assert(srcpad);

    g_mutex_lock(&priv->media_renderer_lock);
    pad_link_return = gst_pad_link(srcpad, sinkpad);
    if (pad_link_return != GST_PAD_LINK_OK) {
        GST_ERROR("Failed to link source with renderer (%d)", pad_link_return);
        ret = FALSE;
        goto done;
    }
    /* Topology changed; ask the pipeline to recompute its latency. */
    gst_element_post_message(_owr_get_pipeline(), gst_message_new_latency(GST_OBJECT(_owr_get_pipeline())));
    priv->source = g_object_ref(source);
done:
    /* NOTE(review): the pads are stored even when linking failed above —
     * presumably so later teardown can still release them, but priv->source
     * stays NULL in that case; confirm callers expect this asymmetry. */
    priv->srcpad = srcpad;
    priv->sinkpad = sinkpad;
    g_mutex_unlock(&priv->media_renderer_lock);
    return ret;
}
/* GObject property setter for videorate.
 * All fields are written under the object lock; properties that affect the
 * output caps trigger a src-side reconfigure afterwards, and toggling
 * drop-only additionally posts a latency message since it changes latency. */
static void
gst_video_rate_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstVideoRate *videorate = GST_VIDEO_RATE (object);
  gboolean latency_changed = FALSE;
  gboolean needs_reconfigure = FALSE;

  GST_OBJECT_LOCK (videorate);
  switch (prop_id) {
    case PROP_SILENT:
      videorate->silent = g_value_get_boolean (value);
      break;
    case PROP_NEW_PREF:
      videorate->new_pref = g_value_get_double (value);
      break;
    case PROP_SKIP_TO_FIRST:
      videorate->skip_to_first = g_value_get_boolean (value);
      break;
    case PROP_DROP_ONLY:{
      gboolean requested = g_value_get_boolean (value);

      /* Latency changes if we switch drop-only mode */
      latency_changed = requested != videorate->drop_only;
      videorate->drop_only = requested;
      needs_reconfigure = TRUE;
      break;
    }
    case PROP_AVERAGE_PERIOD:
      videorate->average_period_set = g_value_get_uint64 (value);
      break;
    case PROP_MAX_RATE:
      g_atomic_int_set (&videorate->max_rate, g_value_get_int (value));
      needs_reconfigure = TRUE;
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (videorate);

  if (!needs_reconfigure)
    return;

  /* Done outside the lock: both calls may interact with other elements. */
  gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (videorate));
  if (latency_changed) {
    gst_element_post_message (GST_ELEMENT (videorate),
        gst_message_new_latency (GST_OBJECT (videorate)));
  }
}
/* Update the stored min/max latency values (each only when its do_* flag is
 * set) and, if anything actually changed, post a latency message so the
 * pipeline redistributes latency. The mutex only guards the field updates;
 * the bus message is posted outside the lock. */
static void
gst_app_src_set_latencies (GstAppSrc * appsrc, gboolean do_min, guint64 min,
    gboolean do_max, guint64 max)
{
  gboolean post_needed = FALSE;

  g_mutex_lock (appsrc->priv->mutex);
  if (do_min && appsrc->priv->min_latency != min) {
    appsrc->priv->min_latency = min;
    post_needed = TRUE;
  }
  if (do_max && appsrc->priv->max_latency != max) {
    appsrc->priv->max_latency = max;
    post_needed = TRUE;
  }
  g_mutex_unlock (appsrc->priv->mutex);

  if (!post_needed)
    return;

  GST_DEBUG_OBJECT (appsrc, "posting latency changed");
  gst_element_post_message (GST_ELEMENT_CAST (appsrc),
      gst_message_new_latency (GST_OBJECT_CAST (appsrc)));
}
/**
 * gst_audio_fx_base_fir_filter_set_kernel:
 * @self: the FIR filter instance
 * @kernel: (transfer full): new filter kernel; ownership passes to @self,
 *     which will later g_free() it
 * @kernel_length: number of taps in @kernel
 * @latency: latency (in samples) introduced by the new kernel
 *
 * Installs a new filter kernel. Any pending residue is pushed downstream
 * first, the residue buffer is re-allocated for the new kernel length, and
 * a latency message is posted if the latency value changed.
 */
void
gst_audio_fx_base_fir_filter_set_kernel (GstAudioFXBaseFIRFilter * self,
    gdouble * kernel, guint kernel_length, guint64 latency)
{
  g_return_if_fail (kernel != NULL);
  g_return_if_fail (self != NULL);

  GST_BASE_TRANSFORM_LOCK (self);
  /* Flush the tail of the previous kernel before switching. */
  if (self->residue) {
    gst_audio_fx_base_fir_filter_push_residue (self);
    self->next_ts = GST_CLOCK_TIME_NONE;
    self->next_off = GST_BUFFER_OFFSET_NONE;
    self->residue_length = 0;
  }

  /* Take ownership of the new kernel; the old one is freed here. */
  g_free (self->kernel);
  g_free (self->residue);

  self->kernel = kernel;
  self->kernel_length = kernel_length;

  /* Only allocate the residue once the audio format (channel count) is
   * known; otherwise it stays NULL until caps are negotiated. */
  if (GST_AUDIO_FILTER (self)->format.channels) {
    self->residue =
        g_new0 (gdouble,
        kernel_length * GST_AUDIO_FILTER (self)->format.channels);
    self->residue_length = 0;
  }

  if (self->latency != latency) {
    self->latency = latency;
    /* NOTE(review): this posts a bus message while still holding the
     * base-transform lock — confirm no bus handler re-enters the element. */
    gst_element_post_message (GST_ELEMENT (self),
        gst_message_new_latency (GST_OBJECT (self)));
  }
  GST_BASE_TRANSFORM_UNLOCK (self);
}
/**
 * gst_audio_fx_base_fir_filter_set_kernel:
 * @self: the FIR filter instance
 * @kernel: (transfer full): new filter kernel; ownership passes to @self
 * @kernel_length: number of taps in @kernel
 * @latency: latency (in samples) introduced by the new kernel
 * @info: (nullable): audio info to use for selecting the process function,
 *     or NULL to use the currently negotiated filter format
 *
 * Installs a new filter kernel, draining buffered samples when required,
 * recomputes the frequency response, reselects the processing function and
 * posts a latency message if the effective latency changed.
 */
void
gst_audio_fx_base_fir_filter_set_kernel (GstAudioFXBaseFIRFilter * self,
    gdouble * kernel, guint kernel_length, guint64 latency,
    const GstAudioInfo * info)
{
  gboolean latency_changed;
  GstAudioFormat format;
  gint channels;

  g_return_if_fail (kernel != NULL);
  g_return_if_fail (self != NULL);

  g_mutex_lock (&self->lock);
  /* Latency effectively changes either when the reported value differs or
   * when the kernel length crosses FFT_THRESHOLD (switching between the
   * time-domain and FFT-based processing paths) while not in low-latency
   * mode. */
  latency_changed = (self->latency != latency
      || (!self->low_latency && self->kernel_length < FFT_THRESHOLD
          && kernel_length >= FFT_THRESHOLD)
      || (!self->low_latency && self->kernel_length >= FFT_THRESHOLD
          && kernel_length < FFT_THRESHOLD));

  /* FIXME: If the latency changes, the buffer size changes too and we
   * have to drain in any case until this is fixed in the future */
  if (self->buffer && (!self->drain_on_changes || latency_changed)) {
    gst_audio_fx_base_fir_filter_push_residue (self);
    self->start_ts = GST_CLOCK_TIME_NONE;
    self->start_off = GST_BUFFER_OFFSET_NONE;
    self->nsamples_out = 0;
    self->nsamples_in = 0;
    self->buffer_fill = 0;
  }

  /* The old kernel is always released; the working buffer only when we
   * drained above (otherwise it still holds samples to be processed). */
  g_free (self->kernel);
  if (!self->drain_on_changes || latency_changed) {
    g_free (self->buffer);
    self->buffer = NULL;
    self->buffer_fill = 0;
    self->buffer_length = 0;
  }

  self->kernel = kernel;
  self->kernel_length = kernel_length;

  /* Prefer the caller-provided audio info; fall back to the negotiated
   * filter format. */
  if (info) {
    format = GST_AUDIO_INFO_FORMAT (info);
    channels = GST_AUDIO_INFO_CHANNELS (info);
  } else {
    format = GST_AUDIO_FILTER_FORMAT (self);
    channels = GST_AUDIO_FILTER_CHANNELS (self);
  }

  gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
  gst_audio_fx_base_fir_filter_select_process_function (self, format, channels);

  if (latency_changed) {
    self->latency = latency;
    /* NOTE(review): posted while self->lock is held — confirm no bus
     * handler synchronously re-enters this element. */
    gst_element_post_message (GST_ELEMENT (self),
        gst_message_new_latency (GST_OBJECT (self)));
  }

  g_mutex_unlock (&self->lock);
}
/* Ruby binding: initialize a Gst latency message object.
 * Converts the Ruby `src` object to a GstObject and wraps a newly created
 * latency message (gst_message_new_latency) into `self`. Returns nil, as
 * Ruby #initialize return values are ignored. */
static VALUE latency_initialize(VALUE self, VALUE src) { G_INITIALIZE(self, gst_message_new_latency(RVAL2GST_OBJ(src))); return Qnil; }
/* In-place transform for videorate.
 * In averaging mode, delegates to the max-avg handler. In normal mode it
 * keeps one previous buffer and, for each incoming buffer, decides whether
 * the previous buffer should be output (possibly multiple times, producing
 * duplicates) or dropped, so that output timestamps land on the configured
 * output frame grid (videorate->next_ts).
 *
 * Returns GST_BASE_TRANSFORM_FLOW_DROPPED for buffers consumed internally,
 * GST_FLOW_NOT_NEGOTIATED if no framerate is set, or the flush result. */
static GstFlowReturn
gst_video_rate_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
{
  GstVideoRate *videorate;
  GstFlowReturn res = GST_BASE_TRANSFORM_FLOW_DROPPED;
  GstClockTime intime, in_ts, in_dur;
  GstClockTime avg_period;
  gboolean skip = FALSE;

  videorate = GST_VIDEO_RATE (trans);

  /* make sure the denominators are not 0 */
  if (videorate->from_rate_denominator == 0 ||
      videorate->to_rate_denominator == 0)
    goto not_negotiated;

  GST_OBJECT_LOCK (videorate);
  avg_period = videorate->average_period_set;
  GST_OBJECT_UNLOCK (videorate);

  /* MT-safe switching between modes: the property setter only stores
   * average_period_set; the actual switch happens here in streaming
   * context. */
  if (G_UNLIKELY (avg_period != videorate->average_period)) {
    gboolean switch_mode = (avg_period == 0 || videorate->average_period == 0);
    videorate->average_period = avg_period;
    videorate->last_ts = GST_CLOCK_TIME_NONE;
    if (switch_mode) {
      if (avg_period) {
        /* enabling average mode */
        videorate->average = 0;
        /* make sure no cached buffers from regular mode are left */
        gst_video_rate_swap_prev (videorate, NULL, 0);
      } else {
        /* enable regular mode */
        videorate->next_ts = GST_CLOCK_TIME_NONE;
        skip = TRUE;
      }
      /* max averaging mode has no latency, normal mode does */
      gst_element_post_message (GST_ELEMENT (videorate),
          gst_message_new_latency (GST_OBJECT (videorate)));
    }
  }

  if (videorate->average_period > 0)
    return gst_video_rate_trans_ip_max_avg (videorate, buffer);

  in_ts = GST_BUFFER_TIMESTAMP (buffer);
  in_dur = GST_BUFFER_DURATION (buffer);

  /* A buffer without a timestamp inherits the predicted one; with no
   * prediction available it cannot be placed and is discarded. */
  if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE)) {
    in_ts = videorate->last_ts;
    if (G_UNLIKELY (in_ts == GST_CLOCK_TIME_NONE))
      goto invalid_buffer;
  }

  /* get the time of the next expected buffer timestamp, we use this when the
   * next buffer has -1 as a timestamp */
  videorate->last_ts = in_ts;
  if (in_dur != GST_CLOCK_TIME_NONE)
    videorate->last_ts += in_dur;

  GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (in_ts));

  /* the input time is the time in the segment + all previously accumulated
   * segments */
  intime = in_ts + videorate->segment.base;

  /* we need to have two buffers to compare */
  if (videorate->prevbuf == NULL) {
    gst_video_rate_swap_prev (videorate, buffer, intime);
    videorate->in++;
    if (!GST_CLOCK_TIME_IS_VALID (videorate->next_ts)) {
      /* new buffer, we expect to output a buffer that matches the first
       * timestamp in the segment */
      if (videorate->skip_to_first || skip) {
        videorate->next_ts = intime;
        videorate->base_ts = in_ts - videorate->segment.start;
        videorate->out_frame_count = 0;
      } else {
        videorate->next_ts = videorate->segment.start + videorate->segment.base;
      }
    }
  } else {
    GstClockTime prevtime;
    gint count = 0;
    gint64 diff1, diff2;

    prevtime = videorate->prev_ts;

    GST_LOG_OBJECT (videorate,
        "BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
        " outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
        GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));

    videorate->in++;

    /* drop new buffer if it's before previous one */
    if (intime < prevtime) {
      GST_DEBUG_OBJECT (videorate, "The new buffer (%" GST_TIME_FORMAT
          ") is before the previous buffer (%" GST_TIME_FORMAT
          "). Dropping new buffer.", GST_TIME_ARGS (intime),
          GST_TIME_ARGS (prevtime));
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      goto done;
    }

    /* got 2 buffers, see which one is the best */
    do {
      diff1 = prevtime - videorate->next_ts;
      diff2 = intime - videorate->next_ts;

      /* take absolute values, beware: abs and ABS don't work for gint64 */
      if (diff1 < 0)
        diff1 = -diff1;
      if (diff2 < 0)
        diff2 = -diff2;

      GST_LOG_OBJECT (videorate,
          "diff with prev %" GST_TIME_FORMAT " diff with new %"
          GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
          GST_TIME_ARGS (videorate->next_ts));

      /* output first one when its the best */
      if (diff1 <= diff2) {
        GstFlowReturn r;
        count++;

        /* on error the _flush function posted a warning already */
        if ((r =
                gst_video_rate_flush_prev (videorate,
                    count > 1)) != GST_FLOW_OK) {
          res = r;
          goto done;
        }
      }

      /* Do not produce any dups. We can exit loop now */
      if (videorate->drop_only)
        break;
      /* continue while the first one was the best, if they were equal avoid
       * going into an infinite loop */
    } while (diff1 < diff2);

    /* if we outputted the first buffer more than once, we have dups */
    if (count > 1) {
      videorate->dup += count - 1;
      if (!videorate->silent)
        gst_video_rate_notify_duplicate (videorate);
    }
    /* if we didn't output the first buffer, we have a drop */
    else if (count == 0) {
      videorate->drop++;
      if (!videorate->silent)
        gst_video_rate_notify_drop (videorate);
      GST_LOG_OBJECT (videorate,
          "new is best, old never used, drop, outgoing ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (videorate->next_ts));
    }
    GST_LOG_OBJECT (videorate,
        "END, putting new in old, diff1 %" GST_TIME_FORMAT
        ", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
        ", in %" G_GUINT64_FORMAT ", out %" G_GUINT64_FORMAT ", drop %"
        G_GUINT64_FORMAT ", dup %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff1),
        GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
        videorate->in, videorate->out, videorate->drop, videorate->dup);

    /* swap in new one when it's the best */
    gst_video_rate_swap_prev (videorate, buffer, intime);
  }
done:
  return res;

  /* ERRORS */
not_negotiated:
  {
    GST_WARNING_OBJECT (videorate, "no framerate negotiated");
    res = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

invalid_buffer:
  {
    GST_WARNING_OBJECT (videorate,
        "Got buffer with GST_CLOCK_TIME_NONE timestamp, discarding it");
    res = GST_BASE_TRANSFORM_FLOW_DROPPED;
    goto done;
  }
}
/* Pop the next captured frame from the queue (blocking until one arrives or
 * flushing starts), renegotiate caps when the detected mode/pixel format
 * changed (only allowed in AUTO modes), and wrap the frame's bytes into a
 * zero-copy GstBuffer that keeps the DeckLink frame and input alive. */
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;
  gboolean caps_changed = FALSE;

  /* Wait until a frame is queued or we are asked to flush. */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }
  // If we're not flushing, we should have a valid frame from the queue
  g_assert (f != NULL);

  g_mutex_lock (&self->lock);
  /* A mode change is only acceptable when mode auto-detection is on;
   * otherwise the captured frame contradicts the configured mode. */
  if (self->caps_mode != f->mode) {
    if (self->mode == GST_DECKLINK_MODE_AUTO) {
      GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
          f->mode);
      caps_changed = TRUE;
      self->caps_mode = f->mode;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid mode in captured frame"),
          ("Mode set to %d but captured %d", self->caps_mode, f->mode));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  /* Same policy for the pixel format. */
  if (self->caps_format != f->format) {
    if (self->video_format == GST_DECKLINK_VIDEO_FORMAT_AUTO) {
      GST_DEBUG_OBJECT (self, "Format changed from %d to %d",
          self->caps_format, f->format);
      caps_changed = TRUE;
      self->caps_format = f->format;
    } else {
      g_mutex_unlock (&self->lock);
      GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
          ("Invalid pixel format in captured frame"),
          ("Format set to %d but captured %d", self->caps_format, f->format));
      capture_frame_free (f);
      return GST_FLOW_NOT_NEGOTIATED;
    }
  }
  g_mutex_unlock (&self->lock);

  if (caps_changed) {
    /* New mode/format: push updated caps downstream and let the pipeline
     * recompute latency. */
    caps = gst_decklink_mode_get_caps (f->mode, f->format);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  /* Wrap the DeckLink frame memory without copying; video_frame_free
   * releases the frame and input refs taken below when the buffer dies. */
  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;
  gst_buffer_add_video_time_code_meta (*buffer, f->tc);

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}
/* Creates a new latency message originating from @source.
 * gst_message_new_latency() returns a newly created message, so wrap() is
 * passed false — presumably "do not take an extra reference", letting the
 * smart pointer adopt the fresh reference; confirm against the wrap() API. */
LatencyMessagePtr LatencyMessage::create(const ObjectPtr & source) { return LatencyMessagePtr::wrap(gst_message_new_latency(source), false); }
/* Pop the next captured frame from the queue (blocking until one arrives or
 * flushing starts), update caps if auto mode detected a new video mode, and
 * wrap the frame's bytes into a zero-copy GstBuffer that keeps the DeckLink
 * frame and input alive until the buffer is freed. */
static GstFlowReturn
gst_decklink_video_src_create (GstPushSrc * bsrc, GstBuffer ** buffer)
{
  GstDecklinkVideoSrc *self = GST_DECKLINK_VIDEO_SRC_CAST (bsrc);
  GstFlowReturn flow_ret = GST_FLOW_OK;
  const guint8 *data;
  gsize data_size;
  VideoFrame *vf;
  CaptureFrame *f;
  GstCaps *caps;

  /* Wait until a frame is queued or we are asked to flush. */
  g_mutex_lock (&self->lock);
  while (g_queue_is_empty (&self->current_frames) && !self->flushing) {
    g_cond_wait (&self->cond, &self->lock);
  }

  f = (CaptureFrame *) g_queue_pop_head (&self->current_frames);
  g_mutex_unlock (&self->lock);

  if (self->flushing) {
    if (f)
      capture_frame_free (f);
    GST_DEBUG_OBJECT (self, "Flushing");
    return GST_FLOW_FLUSHING;
  }

  g_mutex_lock (&self->lock);
  /* In AUTO mode a mode change triggers renegotiation: push new caps and
   * post a latency message so the pipeline recomputes latency. */
  if (self->mode == GST_DECKLINK_MODE_AUTO && self->caps_mode != f->mode) {
    GST_DEBUG_OBJECT (self, "Mode changed from %d to %d", self->caps_mode,
        f->mode);
    self->caps_mode = f->mode;
    g_mutex_unlock (&self->lock);
    caps = gst_decklink_mode_get_caps (f->mode);
    gst_video_info_from_caps (&self->info, caps);
    gst_base_src_set_caps (GST_BASE_SRC_CAST (bsrc), caps);
    gst_element_post_message (GST_ELEMENT_CAST (self),
        gst_message_new_latency (GST_OBJECT_CAST (self)));
    gst_caps_unref (caps);
  } else {
    g_mutex_unlock (&self->lock);
  }

  f->frame->GetBytes ((gpointer *) & data);
  data_size = self->info.size;

  /* Wrap the DeckLink frame memory without copying; video_frame_free
   * releases the frame and input refs taken below when the buffer dies. */
  vf = (VideoFrame *) g_malloc0 (sizeof (VideoFrame));

  *buffer =
      gst_buffer_new_wrapped_full ((GstMemoryFlags) GST_MEMORY_FLAG_READONLY,
      (gpointer) data, data_size, 0, data_size, vf,
      (GDestroyNotify) video_frame_free);

  vf->frame = f->frame;
  f->frame->AddRef ();
  vf->input = self->input->input;
  vf->input->AddRef ();

  GST_BUFFER_TIMESTAMP (*buffer) = f->capture_time;
  GST_BUFFER_DURATION (*buffer) = f->capture_duration;

  GST_DEBUG_OBJECT (self,
      "Outputting buffer %p with timestamp %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, *buffer, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (*buffer)));

  capture_frame_free (f);

  return flow_ret;
}