/* Main-context source callback driving aggregation.
 *
 * Keeps calling the subclass aggregate() vfunc for as long as we are
 * meant to run, every sink pad has data or is EOS, and the last flow
 * return was OK. On EOS it wakes the main context, removes all attached
 * sources and pushes the EOS event downstream. A FLUSHING return while
 * a flush-seek is in progress is translated back to OK.
 *
 * Always returns G_SOURCE_REMOVE: the source fires once per dispatch.
 */
static gboolean
aggregate_func (GstAggregator * self)
{
  GstAggregatorPrivate *private_data = self->priv;
  GstAggregatorClass *agg_class = GST_AGGREGATOR_GET_CLASS (self);

  GST_LOG_OBJECT (self, "Checking aggregate");

  for (;;) {
    gboolean pads_ready;

    /* Same evaluation order as the original condition:
     * send_eos, then the sinkpad check, then running */
    if (!private_data->send_eos)
      break;

    pads_ready = gst_aggregator_iterate_sinkpads (self,
        (GstAggregatorPadForeachFunc) _check_all_pads_with_data_or_eos, NULL);
    if (!pads_ready || !private_data->running)
      break;

    GST_TRACE_OBJECT (self, "Actually aggregating!");

    private_data->flow_return = agg_class->aggregate (self);

    if (private_data->flow_return == GST_FLOW_EOS) {
      /* Tear down our sources and forward EOS downstream */
      g_main_context_wakeup (self->priv->mcontext);
      _remove_all_sources (self);
      _push_eos (self);
    }

    /* FLUSHING during a flush-seek is expected, not an error */
    if (private_data->flow_return == GST_FLOW_FLUSHING &&
        g_atomic_int_get (&private_data->flush_seeking))
      private_data->flow_return = GST_FLOW_OK;

    GST_LOG_OBJECT (self, "flow return is %s",
        gst_flow_get_name (private_data->flow_return));

    if (private_data->flow_return != GST_FLOW_OK)
      break;
  }

  return G_SOURCE_REMOVE;
}
/* Stop handler: clears the aggregator's flow state and then flushes
 * every sink pad. Always reports success. */
static gboolean
_stop (GstAggregator * agg)
{
  /* Reset flow bookkeeping first... */
  _reset_flow_values (agg);

  /* ...then flush each sink pad in turn */
  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) _flush_pad, NULL);

  return TRUE;
}
/* GstAggregator::stop vfunc for GstGLMixer.
 *
 * Chains up to the parent stop first and aborts on failure. Then tears
 * down the mixer's state: the per-frame bookkeeping arrays (under the
 * object lock), the subclass reset hook, the FBO/depth buffer, the
 * download helper, per-pad upload objects, the cached allocation
 * query/pool, and finally the GL display and context references before
 * a last gst_gl_mixer_reset().
 *
 * NOTE(review): the FBO is deleted via mix->context before that context
 * is unreffed below — the ordering here looks deliberate; confirm no
 * caller depends on the context surviving stop().
 */
static gboolean
gst_gl_mixer_stop (GstAggregator * agg)
{
  GstGLMixer *mix = GST_GL_MIXER (agg);
  GstGLMixerClass *mixer_class = GST_GL_MIXER_GET_CLASS (mix);

  /* Let the base class stop first; bail out if it fails */
  if (!GST_AGGREGATOR_CLASS (parent_class)->stop (agg))
    return FALSE;

  /* Free the frame/buffer arrays under the object lock */
  GST_OBJECT_LOCK (agg);
  g_ptr_array_free (mix->frames, TRUE);
  mix->frames = NULL;
  g_ptr_array_free (mix->array_buffers, TRUE);
  mix->array_buffers = NULL;
  GST_OBJECT_UNLOCK (agg);

  /* Give the subclass a chance to reset its own state */
  if (mixer_class->reset)
    mixer_class->reset (mix);

  /* Delete GL framebuffer objects while the context is still around */
  if (mix->fbo) {
    gst_gl_context_del_fbo (mix->context, mix->fbo, mix->depthbuffer);
    mix->fbo = 0;
    mix->depthbuffer = 0;
  }
  if (mix->download) {
    gst_object_unref (mix->download);
    mix->download = NULL;
  }

  /* Drop per-pad upload objects */
  gst_aggregator_iterate_sinkpads (GST_AGGREGATOR (mix), _clean_upload, NULL);

  /* Release cached negotiation state */
  if (mix->priv->query) {
    gst_query_unref (mix->priv->query);
    mix->priv->query = NULL;
  }
  if (mix->priv->pool) {
    gst_object_unref (mix->priv->pool);
    mix->priv->pool = NULL;
  }

  /* Finally drop the GL display and context references */
  if (mix->display) {
    gst_object_unref (mix->display);
    mix->display = NULL;
  }
  if (mix->context) {
    gst_object_unref (mix->context);
    mix->context = NULL;
  }

  gst_gl_mixer_reset (mix);

  return TRUE;
}
/* Stop handler: resets flow state, stops every sink pad and releases
 * the aggregator's cached tag list (if any). Always reports success. */
static gboolean
_stop (GstAggregator * agg)
{
  GstTagList *cached_tags;

  _reset_flow_values (agg);

  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) _stop_pad, NULL);

  /* Detach the tag list first, then drop our reference */
  cached_tags = agg->priv->tags;
  agg->priv->tags = NULL;
  if (cached_tags)
    gst_tag_list_unref (cached_tags);

  return TRUE;
}
/* Releases the video mixer's GL objects (VAO and index VBO) and asks
 * each sink pad to reset its own GL state via _reset_pad_gl.
 * @context: GL context this callback is invoked with (unused here; the
 * vtable is fetched from the base mixer's own context). */
static void
_reset_gl (GstGLContext * context, GstGLVideoMixer * video_mixer)
{
  const GstGLFuncs *gl_funcs =
      GST_GL_BASE_MIXER (video_mixer)->context->gl_vtable;

  if (video_mixer->vao != 0) {
    gl_funcs->DeleteVertexArrays (1, &video_mixer->vao);
    video_mixer->vao = 0;
  }

  if (video_mixer->vbo_indices != 0) {
    gl_funcs->DeleteBuffers (1, &video_mixer->vbo_indices);
    video_mixer->vbo_indices = 0;
  }

  gst_aggregator_iterate_sinkpads (GST_AGGREGATOR (video_mixer),
      _reset_pad_gl, NULL);
}
/* Answers a latency query on the source pad.
 *
 * Collects the cumulative upstream latency over all sink pads via
 * _latency_query, clamps the configured timeout to the maximum upstream
 * latency (with an element warning) when the pipeline is live, caches
 * the upstream values in the private struct, adds the timeout as our
 * own latency contribution and fills in the query.
 *
 * Fix: data.min/data.max are GstClockTime (unsigned 64-bit — they are
 * tested with GST_CLOCK_TIME_IS_VALID), but were printed with the
 * signed G_GINT64_FORMAT, a printf format/argument mismatch that shows
 * GST_CLOCK_TIME_NONE as -1. Use G_GUINT64_FORMAT instead.
 *
 * Returns: TRUE — the query is always answered.
 */
static gboolean
gst_aggregator_query_latency (GstAggregator * self, GstQuery * query)
{
  LatencyData data;

  data.min = 0;
  data.max = GST_CLOCK_TIME_NONE;
  data.live = FALSE;

  /* query upstream's latency */
  gst_aggregator_iterate_sinkpads (self,
      (GstAggregatorPadForeachFunc) _latency_query, &data);

  /* In a live pipeline the timeout must not exceed what downstream can
   * absorb as latency, so clamp it and warn */
  if (data.live && GST_CLOCK_TIME_IS_VALID (self->timeout) &&
      self->timeout > data.max) {
    GST_ELEMENT_WARNING (self, CORE, NEGOTIATION,
        ("%s", "Timeout too big"),
        ("The requested timeout value is too big for the latency in the "
            "current pipeline. Limiting to %" G_GUINT64_FORMAT, data.max));
    self->timeout = data.max;
  }

  /* Cache the raw upstream values before adding our own contribution */
  self->priv->latency_live = data.live;
  self->priv->latency_min = data.min;
  self->priv->latency_max = data.max;

  /* add our own */
  if (GST_CLOCK_TIME_IS_VALID (self->timeout)) {
    if (GST_CLOCK_TIME_IS_VALID (data.min))
      data.min += self->timeout;
    if (GST_CLOCK_TIME_IS_VALID (data.max))
      data.max += self->timeout;
  }

  GST_DEBUG_OBJECT (self, "configured latency live:%s min:%" G_GUINT64_FORMAT
      " max:%" G_GUINT64_FORMAT, data.live ? "true" : "false", data.min,
      data.max);

  gst_query_set_latency (query, data.live, data.min, data.max);

  return TRUE;
}
/* One iteration of the aggregation loop.
 *
 * Bails out early when the aggregator is no longer running; otherwise
 * pops one item from the queue and repeatedly calls the subclass
 * aggregate() vfunc while send_eos is set, every sink pad has data /
 * is EOS / timed out, and the flow return stays OK. On EOS the queue is
 * flushed and EOS pushed downstream; FLUSHING during a flush-seek is
 * mapped back to OK. */
static void
aggregate_func (GstAggregator * self)
{
  GstAggregatorPrivate *private_data = self->priv;
  GstAggregatorClass *agg_class = GST_AGGREGATOR_GET_CLASS (self);

  if (self->priv->running == FALSE) {
    GST_DEBUG_OBJECT (self, "Not running anymore");
    return;
  }

  QUEUE_POP (self);

  GST_LOG_OBJECT (self, "Checking aggregate");

  for (;;) {
    gboolean pads_ready;

    /* Preserve the original short-circuit order:
     * send_eos, then the sinkpad check, then running */
    if (!private_data->send_eos)
      break;

    pads_ready = gst_aggregator_iterate_sinkpads (self,
        (GstAggregatorPadForeachFunc)
        _check_all_pads_with_data_or_eos_or_timeout, NULL);
    if (!pads_ready || !private_data->running)
      break;

    GST_TRACE_OBJECT (self, "Actually aggregating!");

    private_data->flow_return = agg_class->aggregate (self);

    if (private_data->flow_return == GST_FLOW_EOS) {
      QUEUE_FLUSH (self);
      _push_eos (self);
    }

    if (private_data->flow_return == GST_FLOW_FLUSHING &&
        g_atomic_int_get (&private_data->flush_seeking))
      private_data->flow_return = GST_FLOW_OK;

    GST_LOG_OBJECT (self, "flow return is %s",
        gst_flow_get_name (private_data->flow_return));

    if (private_data->flow_return != GST_FLOW_OK)
      break;
  }
}
/* Forwards @event to all sink pads through gst_pad_forward().
 *
 * When @flush is TRUE, every pad is marked flush-pending in a separate
 * first pass, because flush_start/flush_stop can arrive synchronously
 * while the seek event is still being sent.
 *
 * Consumes the caller's reference on @event.
 * Returns: the accumulated forwarding result. */
static gboolean
_forward_event_to_all_sinkpads (GstAggregator * self, GstEvent * event,
    gboolean flush)
{
  EventData evdata;

  evdata.event = event;
  evdata.result = TRUE;
  evdata.flush = flush;

  /* First pass: flag all pads as flushing before the event goes out */
  if (flush)
    gst_aggregator_iterate_sinkpads (self,
        (GstAggregatorPadForeachFunc) _set_flush_pending, NULL);

  /* Second pass: actually deliver the event to each sink pad */
  gst_pad_forward (self->srcpad,
      (GstPadForwardFunction) event_forward_func, &evdata);

  gst_event_unref (event);

  return evdata.result;
}
/* GstAggregator::aggregate implementation for GstAudioAggregator.
 *
 * Mixes one output block of audio from all sink pads into the current
 * output buffer and pushes it downstream once complete. @timeout is
 * TRUE when aggregation was triggered by a deadline rather than by all
 * pads having data (see the "Got timeout before receiving any caps"
 * branch below).
 *
 * Locking: takes GST_AUDIO_AGGREGATOR_LOCK, then GST_OBJECT_LOCK(agg),
 * and per-pad GST_OBJECT_LOCK inside the pad loop; both outer locks are
 * temporarily dropped around create_output_buffer/set_src_caps and for
 * the final finish_buffer push. The exact lock interleaving is
 * load-bearing — do not reorder.
 */
static GstFlowReturn
gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  /* Get all pads that have data for us and store them in a
   * new list.
   *
   * Calculate the current output offset/timestamp and
   * offset_end/timestamp_end. Allocate a silence buffer
   * for this and store it.
   *
   * For all pads:
   * 1) Once per input buffer (cached)
   *   1) Check discont (flag and timestamp with tolerance)
   *   2) If discont or new, resync. That means:
   *     1) Drop all start data of the buffer that comes before
   *        the current position/offset.
   *     2) Calculate the offset (output segment!) that the first
   *        frame of the input buffer corresponds to. Base this on
   *        the running time.
   *
   * 2) If the current pad's offset/offset_end overlaps with the output
   *    offset/offset_end, mix it at the appropiate position in the output
   *    buffer and advance the pad's position. Remember if this pad needs
   *    a new buffer to advance behind the output offset_end.
   *
   * 3) If we had no pad with a buffer, go EOS.
   *
   * 4) If we had at least one pad that did not advance behind output
   *    offset_end, let collected be called again for the current
   *    output offset/offset_end.
   */
  GstElement *element;
  GstAudioAggregator *aagg;
  GList *iter;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;
  gint64 next_offset;
  gint64 next_timestamp;
  gint rate, bpf;
  gboolean dropped = FALSE;
  gboolean is_eos = TRUE;
  gboolean is_done = TRUE;
  guint blocksize;

  element = GST_ELEMENT (agg);
  aagg = GST_AUDIO_AGGREGATOR (agg);

  /* Sync pad properties to the stream time */
  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) GST_DEBUG_FUNCPTR (sync_pad_values), NULL);

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);

  /* Update position from the segment start/stop if needed */
  if (agg->segment.position == -1) {
    if (agg->segment.rate > 0.0)
      agg->segment.position = agg->segment.start;
    else
      agg->segment.position = agg->segment.stop;
  }

  /* No caps negotiated yet: on timeout just advance the position and
   * output nothing; otherwise it's a not-negotiated error */
  if (G_UNLIKELY (aagg->info.finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) {
    if (timeout) {
      GST_DEBUG_OBJECT (aagg,
          "Got timeout before receiving any caps, don't output anything");

      /* Advance position */
      if (agg->segment.rate > 0.0)
        agg->segment.position += aagg->priv->output_buffer_duration;
      else if (agg->segment.position > aagg->priv->output_buffer_duration)
        agg->segment.position -= aagg->priv->output_buffer_duration;
      else
        agg->segment.position = 0;

      GST_OBJECT_UNLOCK (agg);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_OK;
    } else {
      GST_OBJECT_UNLOCK (agg);
      goto not_negotiated;
    }
  }

  /* Push pending caps downstream; drop the object lock while doing so */
  if (aagg->priv->send_caps) {
    GST_OBJECT_UNLOCK (agg);
    gst_aggregator_set_src_caps (agg, aagg->current_caps);
    GST_OBJECT_LOCK (agg);

    aagg->priv->send_caps = FALSE;
  }

  rate = GST_AUDIO_INFO_RATE (&aagg->info);
  bpf = GST_AUDIO_INFO_BPF (&aagg->info);

  /* First block: derive the sample offset from the segment position */
  if (aagg->priv->offset == -1) {
    aagg->priv->offset =
        gst_util_uint64_scale (agg->segment.position - agg->segment.start,
        rate, GST_SECOND);
    GST_DEBUG_OBJECT (aagg, "Starting at offset %" G_GINT64_FORMAT,
        aagg->priv->offset);
  }

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      rate, GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* for the next timestamp, use the sample counter, which will
   * never accumulate rounding errors */

  /* FIXME: Reverse mixing does not work at all yet */
  if (agg->segment.rate > 0.0) {
    next_offset = aagg->priv->offset + blocksize;
  } else {
    next_offset = aagg->priv->offset - blocksize;
  }

  next_timestamp = agg->segment.start + gst_util_uint64_scale (next_offset,
      GST_SECOND, rate);

  /* Lazily allocate the output buffer (starts as a GAP/silence buffer) */
  if (aagg->priv->current_buffer == NULL) {
    GST_OBJECT_UNLOCK (agg);
    aagg->priv->current_buffer =
        GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->create_output_buffer (aagg,
        blocksize);
    /* Be careful, some things could have changed ? */
    GST_OBJECT_LOCK (agg);
    GST_BUFFER_FLAG_SET (aagg->priv->current_buffer, GST_BUFFER_FLAG_GAP);
  }
  outbuf = aagg->priv->current_buffer;

  GST_LOG_OBJECT (agg,
      "Starting to mix %u samples for offset %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, blocksize,
      aagg->priv->offset, GST_TIME_ARGS (agg->segment.position));

  /* Mix every sink pad's data that overlaps the current output block */
  for (iter = element->sinkpads; iter; iter = iter->next) {
    GstBuffer *inbuf;
    GstAudioAggregatorPad *pad = (GstAudioAggregatorPad *) iter->data;
    GstAggregatorPad *aggpad = (GstAggregatorPad *) iter->data;
    gboolean drop_buf = FALSE;
    gboolean pad_eos = gst_aggregator_pad_is_eos (aggpad);

    if (!pad_eos)
      is_eos = FALSE;

    inbuf = gst_aggregator_pad_get_buffer (aggpad);

    GST_OBJECT_LOCK (pad);
    if (!inbuf) {
      /* Pad has nothing queued: log on timeout, otherwise we are not
       * done for this block unless the pad is EOS */
      if (timeout) {
        if (pad->priv->output_offset < next_offset) {
          gint64 diff = next_offset - pad->priv->output_offset;
          GST_LOG_OBJECT (pad, "Timeout, missing %" G_GINT64_FORMAT " frames (%"
              GST_TIME_FORMAT ")", diff,
              GST_TIME_ARGS (gst_util_uint64_scale (diff, GST_SECOND,
                      GST_AUDIO_INFO_RATE (&aagg->info))));
        }
      } else if (!pad_eos) {
        is_done = FALSE;
      }
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (!pad->priv->buffer || pad->priv->buffer == inbuf);

    /* New buffer? */
    if (!pad->priv->buffer) {
      /* Takes ownership of buffer */
      if (!gst_audio_aggregator_fill_buffer (aagg, pad, inbuf)) {
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    } else {
      gst_buffer_unref (inbuf);
    }

    if (!pad->priv->buffer && !dropped && pad_eos) {
      GST_DEBUG_OBJECT (aggpad, "Pad is in EOS state");
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (pad->priv->buffer);

    /* This pad is lacking behind, we need to update the offset
     * and maybe drop the current buffer */
    if (pad->priv->output_offset < aagg->priv->offset) {
      gint64 diff = aagg->priv->offset - pad->priv->output_offset;
      gint64 odiff = diff;

      if (pad->priv->position + diff > pad->priv->size)
        diff = pad->priv->size - pad->priv->position;
      pad->priv->position += diff;
      pad->priv->output_offset += diff;

      if (pad->priv->position == pad->priv->size) {
        GST_LOG_OBJECT (pad, "Buffer was late by %" GST_TIME_FORMAT
            ", dropping %" GST_PTR_FORMAT,
            GST_TIME_ARGS (gst_util_uint64_scale (odiff, GST_SECOND,
                    GST_AUDIO_INFO_RATE (&aagg->info))), pad->priv->buffer);
        /* Buffer done, drop it */
        gst_buffer_replace (&pad->priv->buffer, NULL);
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    }

    /* Pad overlaps the current output block: mix it in */
    if (pad->priv->output_offset >= aagg->priv->offset
        && pad->priv->output_offset < aagg->priv->offset + blocksize
        && pad->priv->buffer) {
      GST_LOG_OBJECT (aggpad, "Mixing buffer for current offset");
      drop_buf = !gst_audio_aggregator_mix_buffer (aagg, pad,
          pad->priv->buffer, outbuf);
      if (pad->priv->output_offset >= next_offset) {
        GST_DEBUG_OBJECT (pad,
            "Pad is after current offset: %" G_GUINT64_FORMAT " >= %"
            G_GINT64_FORMAT, pad->priv->output_offset, next_offset);
      } else {
        is_done = FALSE;
      }
    }

    GST_OBJECT_UNLOCK (pad);

    /* Drop outside the pad lock to avoid lock-order issues */
    if (drop_buf)
      gst_aggregator_pad_drop_buffer (aggpad);
  }
  GST_OBJECT_UNLOCK (agg);

  if (dropped) {
    /* We dropped a buffer, retry */
    GST_INFO_OBJECT (aagg, "A pad dropped a buffer, wait for the next one");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  if (!is_done && !is_eos) {
    /* Get more buffers */
    GST_INFO_OBJECT (aagg, "We're not done yet for the current offset,"
        " waiting for more data");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  /* All pads EOS: flush the remaining (possibly partial) output */
  if (is_eos) {
    gint64 max_offset = 0;

    GST_DEBUG_OBJECT (aagg, "We're EOS");

    GST_OBJECT_LOCK (agg);
    for (iter = GST_ELEMENT (agg)->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      max_offset = MAX ((gint64) max_offset, (gint64) pad->priv->output_offset);
    }
    GST_OBJECT_UNLOCK (agg);

    /* This means EOS or nothing mixed in at all */
    if (aagg->priv->offset == max_offset) {
      gst_buffer_replace (&aagg->priv->current_buffer, NULL);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_EOS;
    }

    /* Trim the last buffer to the actual amount of data mixed in */
    if (max_offset <= next_offset) {
      GST_DEBUG_OBJECT (aagg,
          "Last buffer is incomplete: %" G_GUINT64_FORMAT " <= %"
          G_GINT64_FORMAT, max_offset, next_offset);
      next_offset = max_offset;
      next_timestamp = agg->segment.start + gst_util_uint64_scale (next_offset,
          GST_SECOND, rate);

      if (next_offset > aagg->priv->offset)
        gst_buffer_resize (outbuf, 0, (next_offset - aagg->priv->offset) * bpf);
    }
  }

  /* set timestamps on the output buffer */
  GST_OBJECT_LOCK (agg);
  if (agg->segment.rate > 0.0) {
    GST_BUFFER_PTS (outbuf) = agg->segment.position;
    GST_BUFFER_OFFSET (outbuf) = aagg->priv->offset;
    GST_BUFFER_OFFSET_END (outbuf) = next_offset;
    GST_BUFFER_DURATION (outbuf) = next_timestamp - agg->segment.position;
  } else {
    GST_BUFFER_PTS (outbuf) = next_timestamp;
    GST_BUFFER_OFFSET (outbuf) = next_offset;
    GST_BUFFER_OFFSET_END (outbuf) = aagg->priv->offset;
    GST_BUFFER_DURATION (outbuf) = agg->segment.position - next_timestamp;
  }
  GST_OBJECT_UNLOCK (agg);

  /* send it out */
  GST_LOG_OBJECT (aagg,
      "pushing outbuf %p, timestamp %" GST_TIME_FORMAT " offset %"
      G_GINT64_FORMAT, outbuf, GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
      GST_BUFFER_OFFSET (outbuf));

  /* Push without holding our locks; finish_buffer may block downstream */
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  ret = gst_aggregator_finish_buffer (agg, aagg->priv->current_buffer);
  aagg->priv->current_buffer = NULL;

  GST_LOG_OBJECT (aagg, "pushed outbuf, result = %s", gst_flow_get_name (ret));

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);
  aagg->priv->offset = next_offset;
  agg->segment.position = next_timestamp;

  /* If there was a timeout and there was a gap in data in one of the
   * streams, then it's a very good time to resync the timestamps:
   * invalidate the output offset of any pad that fell behind */
  if (timeout) {
    for (iter = element->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      GST_OBJECT_LOCK (pad);
      if (pad->priv->output_offset < aagg->priv->offset)
        pad->priv->output_offset = -1;
      GST_OBJECT_UNLOCK (pad);
    }
  }
  GST_OBJECT_UNLOCK (agg);
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    GST_ELEMENT_ERROR (aagg, STREAM, FORMAT, (NULL),
        ("Unknown data received, not negotiated"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}