/* Query handler for audiotestsrc.
 *
 * Handles GST_QUERY_CONVERT between sample counts (GST_FORMAT_DEFAULT)
 * and time (GST_FORMAT_TIME) using the configured sample rate; everything
 * else is forwarded to the parent class. */
static gboolean
gst_audio_test_src_query (GstBaseSrc * basesrc, GstQuery * query)
{
  GstAudioTestSrc *src = GST_AUDIO_TEST_SRC (basesrc);
  GstFormat src_fmt, dest_fmt;
  gint64 src_val, dest_val;

  /* anything but a convert query is handled by the base class */
  if (GST_QUERY_TYPE (query) != GST_QUERY_CONVERT)
    return GST_BASE_SRC_CLASS (parent_class)->query (basesrc, query);

  gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);

  if (src_fmt == dest_fmt) {
    /* identity conversion */
    dest_val = src_val;
  } else if (src_fmt == GST_FORMAT_DEFAULT && dest_fmt == GST_FORMAT_TIME) {
    /* samples to time */
    dest_val = gst_util_uint64_scale_int (src_val, GST_SECOND, src->samplerate);
  } else if (src_fmt == GST_FORMAT_TIME && dest_fmt == GST_FORMAT_DEFAULT) {
    /* time to samples */
    dest_val = gst_util_uint64_scale_int (src_val, src->samplerate, GST_SECOND);
  } else {
    /* unsupported format pair */
    GST_DEBUG_OBJECT (src, "query failed");
    return FALSE;
  }

  gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
  return TRUE;
}
/* Sink-pad chain function for the Siren7 encoder.
 *
 * Input audio is accumulated in an adapter; every complete group of 640
 * input bytes is encoded into one 40-byte Siren frame.  All complete frames
 * currently available are encoded into a single output buffer that is
 * pushed downstream.  Incomplete tail data stays in the adapter.
 *
 * Takes ownership of @buf (it is handed to the adapter).
 * Returns the flow result of the downstream push, or GST_FLOW_OK when
 * there was not yet enough data for a full frame. */
static GstFlowReturn
gst_siren_enc_chain (GstPad * pad, GstBuffer * buf)
{
  GstSirenEnc *enc;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *out_buf;
  guint8 *in_data, *out_data;
  guint8 *to_free = NULL;
  guint i, size, num_frames;
  gint out_size, in_size;
  gint encode_ret;
  gboolean discont;
  GstClockTime timestamp;
  guint64 distance;
  GstCaps *outcaps;

  enc = GST_SIREN_ENC (GST_PAD_PARENT (pad));

  discont = GST_BUFFER_IS_DISCONT (buf);
  if (discont) {
    /* drop any partial frame and remember to flag the next output buffer */
    GST_DEBUG_OBJECT (enc, "received DISCONT, flush adapter");
    gst_adapter_clear (enc->adapter);
    enc->discont = TRUE;
  }

  gst_adapter_push (enc->adapter, buf);

  size = gst_adapter_available (enc->adapter);

  GST_LOG_OBJECT (enc, "Received buffer of size %d with adapter of size : %d",
      GST_BUFFER_SIZE (buf), size);

  /* we need to process 640 input bytes to produce 40 output bytes */
  /* calculate the amount of frames we will handle */
  num_frames = size / 640;

  /* no frames, wait some more */
  if (num_frames == 0)
    goto done;

  /* this is the input/output size */
  in_size = num_frames * 640;
  out_size = num_frames * 40;

  GST_LOG_OBJECT (enc, "we have %u frames, %u in, %u out", num_frames,
      in_size, out_size);

  /* set output caps when needed; the pad keeps its own ref so unreffing
   * the template caps here is safe */
  if ((outcaps = GST_PAD_CAPS (enc->srcpad)) == NULL) {
    outcaps = gst_static_pad_template_get_caps (&srctemplate);
    gst_pad_set_caps (enc->srcpad, outcaps);
    gst_caps_unref (outcaps);
  }

  /* get a buffer */
  ret = gst_pad_alloc_buffer_and_set_caps (enc->srcpad, -1, out_size,
      outcaps, &out_buf);
  if (ret != GST_FLOW_OK)
    goto alloc_failed;

  /* get the timestamp for the output buffer */
  timestamp = gst_adapter_prev_timestamp (enc->adapter, &distance);

  /* add the amount of time taken by the distance; distance is in bytes,
   * 2 bytes per sample at 16kHz — assumes S16 mono 16kHz input */
  if (timestamp != -1)
    timestamp += gst_util_uint64_scale_int (distance / 2, GST_SECOND, 16000);

  GST_LOG_OBJECT (enc,
      "timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
      GST_TIME_ARGS (timestamp), distance);

  /* get the input data for all the frames; gst_adapter_take() returns
   * memory we must g_free() in the done: path */
  to_free = in_data = gst_adapter_take (enc->adapter, in_size);
  out_data = GST_BUFFER_DATA (out_buf);

  for (i = 0; i < num_frames; i++) {
    GST_LOG_OBJECT (enc, "Encoding frame %u/%u", i, num_frames);

    /* encode 640 input bytes to 40 output bytes */
    encode_ret = Siren7_EncodeFrame (enc->encoder, in_data, out_data);
    if (encode_ret != 0)
      goto encode_error;

    /* move to next frame */
    out_data += 40;
    in_data += 640;
  }

  GST_LOG_OBJECT (enc, "Finished encoding");

  /* mark discont */
  if (enc->discont) {
    GST_BUFFER_FLAG_SET (out_buf, GST_BUFFER_FLAG_DISCONT);
    enc->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (out_buf) = timestamp;
  GST_BUFFER_DURATION (out_buf) = num_frames * FRAME_DURATION;

  ret = gst_pad_push (enc->srcpad, out_buf);

done:
  if (to_free)
    g_free (to_free);

  return ret;

  /* ERRORS */
alloc_failed:
  {
    GST_DEBUG_OBJECT (enc, "failed to pad_alloc buffer: %d (%s)", ret,
        gst_flow_get_name (ret));
    goto done;
  }
encode_error:
  {
    GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL),
        ("Error encoding frame: %d", encode_ret));
    ret = GST_FLOW_ERROR;
    gst_buffer_unref (out_buf);
    goto done;
  }
}
/* Source-pad streaming task for the OpenMAX base filter.
 *
 * One iteration: request a filled buffer header from the OMX output port,
 * hand its contents downstream (zero-copy when the GstBuffer is shared via
 * pAppPrivate, otherwise by allocating and memcpy-ing), handle EOS, then
 * rearm the header for buffer sharing and release it back to the port.
 * On any non-OK flow return the task pauses itself. */
static void
output_loop (gpointer data)
{
  GstPad *pad;
  GOmxCore *gomx;
  GOmxPort *out_port;
  GstOmxBaseFilter *self;
  GstFlowReturn ret = GST_FLOW_OK;

  pad = data;
  self = GST_OMX_BASE_FILTER (gst_pad_get_parent (pad));
  gomx = self->gomx;

  GST_LOG_OBJECT (self, "begin");

  if (!self->initialized) {
    /* g_error() aborts; the return below is unreachable in practice */
    g_error ("not initialized");
    return;
  }

  out_port = self->out_port;

  if (G_LIKELY (out_port->enabled)) {
    OMX_BUFFERHEADERTYPE *omx_buffer = NULL;

    GST_LOG_OBJECT (self, "request buffer");
    /* blocks until the component produces output or the port is flushed */
    omx_buffer = g_omx_port_request_buffer (out_port);

    GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

    if (G_UNLIKELY (!omx_buffer)) {
      GST_WARNING_OBJECT (self, "null buffer: leaving");
      goto leave;
    }

    GST_DEBUG_OBJECT (self,
        "omx_buffer: size=%lu, len=%lu, flags=%lu, offset=%lu, timestamp=%lld",
        omx_buffer->nAllocLen, omx_buffer->nFilledLen, omx_buffer->nFlags,
        omx_buffer->nOffset, omx_buffer->nTimeStamp);

    if (G_LIKELY (omx_buffer->nFilledLen > 0)) {
      GstBuffer *buf;

#if 1
      /** @todo remove this check */
      if (G_LIKELY (self->in_port->enabled)) {
        GstCaps *caps = NULL;

        caps = gst_pad_get_negotiated_caps (self->srcpad);

        if (!caps) {
          /** @todo We shouldn't be doing this. */
          GST_WARNING_OBJECT (self, "faking settings changed notification");
          if (gomx->settings_changed_cb)
            gomx->settings_changed_cb (gomx);
        } else {
          GST_LOG_OBJECT (self, "caps already fixed: %" GST_PTR_FORMAT, caps);
          gst_caps_unref (caps);
        }
      }
#endif

      /* buf is always null when the output buffer pointer isn't shared. */
      buf = omx_buffer->pAppPrivate;

      if (buf && !(omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) {
        /* zero-copy path: the header already wraps a GstBuffer we own */
        GST_BUFFER_SIZE (buf) = omx_buffer->nFilledLen;
        if (self->use_timestamps) {
          /* OMX ticks -> GStreamer nanoseconds */
          GST_BUFFER_TIMESTAMP (buf) =
              gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND,
              OMX_TICKS_PER_SECOND);
        }

        /* detach the buffer from the header; a fresh one is attached below */
        omx_buffer->pAppPrivate = NULL;
        omx_buffer->pBuffer = NULL;

        ret = push_buffer (self, buf);

        gst_buffer_unref (buf);
      } else {
        /* This is only meant for the first OpenMAX buffers,
         * which need to be pre-allocated. */
        /* Also for the very last one. */
        /* copy path: allocate a downstream buffer and memcpy the payload;
         * NOTE(review): the alloc return code is ignored here, only the
         * resulting buf pointer is checked */
        gst_pad_alloc_buffer_and_set_caps (self->srcpad,
            GST_BUFFER_OFFSET_NONE, omx_buffer->nFilledLen,
            GST_PAD_CAPS (self->srcpad), &buf);

        if (G_LIKELY (buf)) {
          memcpy (GST_BUFFER_DATA (buf),
              omx_buffer->pBuffer + omx_buffer->nOffset,
              omx_buffer->nFilledLen);
          if (self->use_timestamps) {
            GST_BUFFER_TIMESTAMP (buf) =
                gst_util_uint64_scale_int (omx_buffer->nTimeStamp, GST_SECOND,
                OMX_TICKS_PER_SECOND);
          }

          if (self->share_output_buffer) {
            /* sharing was requested but this header carried its own
             * malloc'ed memory; free it so a shared one can be attached */
            GST_WARNING_OBJECT (self, "couldn't zero-copy");
            g_free (omx_buffer->pBuffer);
            omx_buffer->pBuffer = NULL;
          }

          ret = push_buffer (self, buf);
        } else {
          GST_WARNING_OBJECT (self, "couldn't allocate buffer of size %d",
              omx_buffer->nFilledLen);
        }
      }
    } else {
      GST_WARNING_OBJECT (self, "empty buffer");
    }

    if (G_UNLIKELY (omx_buffer->nFlags & OMX_BUFFERFLAG_EOS)) {
      GST_DEBUG_OBJECT (self, "got eos");
      g_omx_core_set_done (gomx);
      goto leave;
    }

    /* rearm the header for zero-copy: attach a new downstream-allocated
     * GstBuffer whose data the component will write into directly */
    if (self->share_output_buffer && !omx_buffer->pBuffer
        && omx_buffer->nOffset == 0) {
      GstBuffer *buf;
      GstFlowReturn result;

      GST_LOG_OBJECT (self, "allocate buffer");
      result = gst_pad_alloc_buffer_and_set_caps (self->srcpad,
          GST_BUFFER_OFFSET_NONE, omx_buffer->nAllocLen,
          GST_PAD_CAPS (self->srcpad), &buf);

      if (G_LIKELY (result == GST_FLOW_OK)) {
        /* extra ref is dropped when the buffer comes back via pAppPrivate */
        gst_buffer_ref (buf);
        omx_buffer->pAppPrivate = buf;

        omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
        omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
      } else {
        GST_WARNING_OBJECT (self,
            "could not pad allocate buffer, using malloc");
        omx_buffer->pBuffer = g_malloc (omx_buffer->nAllocLen);
      }
    }

    if (self->share_output_buffer && !omx_buffer->pBuffer) {
      GST_ERROR_OBJECT (self, "no input buffer to share");
    }

    omx_buffer->nFilledLen = 0;
    GST_LOG_OBJECT (self, "release_buffer");
    g_omx_port_release_buffer (out_port, omx_buffer);
  }

leave:
  self->last_pad_push_return = ret;

  if (ret != GST_FLOW_OK) {
    GST_INFO_OBJECT (self, "pause task, reason: %s", gst_flow_get_name (ret));
    gst_pad_pause_task (self->srcpad);
  }

  GST_LOG_OBJECT (self, "end");

  /* balances gst_pad_get_parent() at the top of each iteration */
  gst_object_unref (self);
}
/* For the clock skew we use a windowed low point averaging algorithm as can be
 * found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
 * over Network Delays":
 * http://www.grame.fr/Ressources/pub/TR-050601.pdf
 * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
 *
 * The idea is that the jitter is composed of:
 *
 *  J = N + n
 *
 *   N : a constant network delay.
 *   n : random added noise. The noise is concentrated around 0
 *
 * In the receiver we can track the elapsed time at the sender with:
 *
 *  send_diff(i) = (Tsi - Ts0);
 *
 *   Tsi : The time at the sender at packet i
 *   Ts0 : The time at the sender at the first packet
 *
 * This is the difference between the RTP timestamp in the first received packet
 * and the current packet.
 *
 * At the receiver we have to deal with the jitter introduced by the network.
 *
 *  recv_diff(i) = (Tri - Tr0)
 *
 *   Tri : The time at the receiver at packet i
 *   Tr0 : The time at the receiver at the first packet
 *
 * Both of these values contain a jitter Ji, a jitter for packet i, so we can
 * write:
 *
 *  recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0)
 *
 *    Cri    : The time of the clock at the receiver for packet i
 *    D + ni : The jitter when receiving packet i
 *
 * We see that the network delay is irrelevant here as we can eliminate D:
 *
 *  recv_diff(i) = (Cri + ni) - (Cr0 + n0)
 *
 * The drift is now expressed as:
 *
 *  Drift(i) = recv_diff(i) - send_diff(i);
 *
 * We now keep the W latest values of Drift and find the minimum (this is the
 * one with the lowest network jitter and thus the one which is least affected
 * by it). We average this lowest value to smooth out the resulting network skew.
 *
 * Both the window and the weighting used for averaging influence the accuracy
 * of the drift estimation. Finding the correct parameters turns out to be a
 * compromise between accuracy and inertia.
 *
 * We use a 2 second window or up to 512 data points, which is statistically big
 * enough to catch spikes (FIXME, detect spikes).
 * We also use a rather large weighting factor (125) to smoothly adapt. During
 * startup, when filling the window, we use a parabolic weighting factor, the
 * more the window is filled, the faster we move to the detected possible skew.
 *
 * Returns: @time adjusted with the clock skew.
 */
static GstClockTime
calculate_skew (RTPJitterBuffer * jbuf, guint32 rtptime, GstClockTime time)
{
  guint64 ext_rtptime;
  guint64 send_diff, recv_diff;
  gint64 delta;
  gint64 old;
  gint pos, i;
  GstClockTime gstrtptime, out_time;
  guint64 slope;

  /* unwrap the 32-bit RTP timestamp into a monotonically growing value */
  ext_rtptime = gst_rtp_buffer_ext_timestamp (&jbuf->ext_rtptime, rtptime);

  /* duplicate RTP timestamp: reuse the previous output time */
  if (jbuf->last_rtptime != -1 && ext_rtptime == jbuf->last_rtptime)
    return jbuf->prev_out_time;

  /* convert RTP units to nanoseconds */
  gstrtptime =
      gst_util_uint64_scale_int (ext_rtptime, GST_SECOND, jbuf->clock_rate);

  /* keep track of the last extended rtptime */
  jbuf->last_rtptime = ext_rtptime;

  /* first time, lock on to time and gstrtptime */
  if (G_UNLIKELY (jbuf->base_time == -1)) {
    jbuf->base_time = time;
    jbuf->prev_out_time = -1;
    GST_DEBUG ("Taking new base time %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
  }
  if (G_UNLIKELY (jbuf->base_rtptime == -1)) {
    jbuf->base_rtptime = gstrtptime;
    jbuf->base_extrtp = ext_rtptime;
    jbuf->prev_send_diff = -1;
    GST_DEBUG ("Taking new base rtptime %" GST_TIME_FORMAT,
        GST_TIME_ARGS (gstrtptime));
  }

  if (G_LIKELY (gstrtptime >= jbuf->base_rtptime))
    send_diff = gstrtptime - jbuf->base_rtptime;
  else if (time != -1) {
    /* elapsed time at sender, timestamps can go backwards and thus be smaller
     * than our base time, take a new base time in that case. */
    GST_WARNING ("backward timestamps at server, taking new base time");
    rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, FALSE);
    send_diff = 0;
  } else {
    GST_WARNING ("backward timestamps at server but no timestamps");
    send_diff = 0;
    /* at least try to get a new timestamp.. */
    jbuf->base_time = -1;
  }

  GST_DEBUG ("extrtp %" G_GUINT64_FORMAT ", gstrtp %" GST_TIME_FORMAT ", base %"
      GST_TIME_FORMAT ", send_diff %" GST_TIME_FORMAT, ext_rtptime,
      GST_TIME_ARGS (gstrtptime), GST_TIME_ARGS (jbuf->base_rtptime),
      GST_TIME_ARGS (send_diff));

  /* we don't have an arrival timestamp so we can't do skew detection. we
   * should still apply a timestamp based on RTP timestamp and base_time */
  if (time == -1 || jbuf->base_time == -1)
    goto no_skew;

  /* elapsed time at receiver, includes the jitter */
  recv_diff = time - jbuf->base_time;

  /* measure the diff */
  delta = ((gint64) recv_diff) - ((gint64) send_diff);

  /* measure the slope, this gives a rough estimate between the sender speed
   * and the receiver speed. This should be approximately 8, higher values
   * indicate a burst (especially when the connection starts) */
  if (recv_diff > 0)
    slope = (send_diff * 8) / recv_diff;
  else
    slope = 8;

  GST_DEBUG ("time %" GST_TIME_FORMAT ", base %" GST_TIME_FORMAT ", recv_diff %"
      GST_TIME_FORMAT ", slope %" G_GUINT64_FORMAT, GST_TIME_ARGS (time),
      GST_TIME_ARGS (jbuf->base_time), GST_TIME_ARGS (recv_diff), slope);

  /* if the difference between the sender timeline and the receiver timeline
   * changed too quickly we have to resync because the server likely restarted
   * its timestamps. */
  if (ABS (delta - jbuf->skew) > GST_SECOND) {
    GST_WARNING ("delta - skew: %" GST_TIME_FORMAT " too big, reset skew",
        GST_TIME_ARGS (ABS (delta - jbuf->skew)));
    rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
    send_diff = 0;
    delta = 0;
  }

  pos = jbuf->window_pos;

  if (G_UNLIKELY (jbuf->window_filling)) {
    /* we are filling the window */
    GST_DEBUG ("filling %d, delta %" G_GINT64_FORMAT, pos, delta);
    jbuf->window[pos++] = delta;
    /* calc the min delta we observed */
    if (G_UNLIKELY (pos == 1 || delta < jbuf->window_min))
      jbuf->window_min = delta;

    if (G_UNLIKELY (send_diff >= MAX_TIME || pos >= MAX_WINDOW)) {
      jbuf->window_size = pos;

      /* window filled */
      GST_DEBUG ("min %" G_GINT64_FORMAT, jbuf->window_min);

      /* the skew is now the min */
      jbuf->skew = jbuf->window_min;
      jbuf->window_filling = FALSE;
    } else {
      gint perc_time, perc_window, perc;

      /* figure out how much we filled the window, this depends on the amount of
       * time we have or the max number of points we keep. */
      perc_time = send_diff * 100 / MAX_TIME;
      perc_window = pos * 100 / MAX_WINDOW;
      perc = MAX (perc_time, perc_window);

      /* make a parabolic function, the closer we get to the MAX, the more value
       * we give to the scaling factor of the new value */
      perc = perc * perc;

      /* quickly go to the min value when we are filling up, slowly when we are
       * just starting because we're not sure it's a good value yet. */
      jbuf->skew =
          (perc * jbuf->window_min + ((10000 - perc) * jbuf->skew)) / 10000;
      jbuf->window_size = pos + 1;
    }
  } else {
    /* pick old value and store new value. We keep the previous value in order
     * to quickly check if the min of the window changed */
    old = jbuf->window[pos];
    jbuf->window[pos++] = delta;

    if (G_UNLIKELY (delta <= jbuf->window_min)) {
      /* if the new value we inserted is smaller or equal to the current min,
       * it becomes the new min */
      jbuf->window_min = delta;
    } else if (G_UNLIKELY (old == jbuf->window_min)) {
      gint64 min = G_MAXINT64;

      /* if we removed the old min, we have to find a new min */
      for (i = 0; i < jbuf->window_size; i++) {
        /* we found another value equal to the old min, we can stop searching now */
        if (jbuf->window[i] == old) {
          min = old;
          break;
        }
        if (jbuf->window[i] < min)
          min = jbuf->window[i];
      }
      jbuf->window_min = min;
    }
    /* average the min values (weighting factor 125, see comment above) */
    jbuf->skew = (jbuf->window_min + (124 * jbuf->skew)) / 125;
    GST_DEBUG ("delta %" G_GINT64_FORMAT ", new min: %" G_GINT64_FORMAT,
        delta, jbuf->window_min);
  }
  /* wrap around in the window */
  if (G_UNLIKELY (pos >= jbuf->window_size))
    pos = 0;
  jbuf->window_pos = pos;

no_skew:
  /* the output time is defined as the base timestamp plus the RTP time
   * adjusted for the clock skew .*/
  if (jbuf->base_time != -1) {
    out_time = jbuf->base_time + send_diff;
    /* skew can be negative and we don't want to make invalid timestamps */
    if (jbuf->skew < 0 && out_time < -jbuf->skew) {
      out_time = 0;
    } else {
      out_time += jbuf->skew;
    }
    /* check if timestamps are not going backwards, we can only check this if we
     * have a previous out time and a previous send_diff */
    if (G_LIKELY (jbuf->prev_out_time != -1 && jbuf->prev_send_diff != -1)) {
      /* now check for backwards timestamps */
      if (G_UNLIKELY (
              /* if the server timestamps went up and the out_time backwards */
              (send_diff > jbuf->prev_send_diff
                  && out_time < jbuf->prev_out_time) ||
              /* if the server timestamps went backwards and the out_time forwards */
              (send_diff < jbuf->prev_send_diff
                  && out_time > jbuf->prev_out_time) ||
              /* if the server timestamps did not change */
              send_diff == jbuf->prev_send_diff)) {
        GST_DEBUG ("backwards timestamps, using previous time");
        out_time = jbuf->prev_out_time;
      }
    }
    if (time != -1 && out_time + jbuf->delay < time) {
      /* if we are going to produce a timestamp that is later than the input
       * timestamp, we need to reset the jitterbuffer. Likely the server paused
       * temporarily */
      GST_DEBUG ("out %" GST_TIME_FORMAT " + %" G_GUINT64_FORMAT " < time %"
          GST_TIME_FORMAT ", reset jitterbuffer", GST_TIME_ARGS (out_time),
          jbuf->delay, GST_TIME_ARGS (time));
      rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
      out_time = time;
      send_diff = 0;
    }
  } else
    out_time = -1;

  jbuf->prev_out_time = out_time;
  jbuf->prev_send_diff = send_diff;

  GST_DEBUG ("skew %" G_GINT64_FORMAT ", out %" GST_TIME_FORMAT,
      jbuf->skew, GST_TIME_ARGS (out_time));

  return out_time;
}
/* Create function for the base audio source.
 *
 * Reads @length bytes (rounded down to whole samples, or one ringbuffer
 * segment when no length is given) starting at @offset (or the next
 * sequential sample) from the ringbuffer into a new buffer, marks DISCONT
 * on overruns, then timestamps the buffer according to the configured
 * clock-slaving method.
 *
 * Returns GST_FLOW_OK with *@outbuf set, or an error flow return. */
static GstFlowReturn
gst_base_audio_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
    GstBuffer ** outbuf)
{
  GstBaseAudioSrc *src = GST_BASE_AUDIO_SRC (bsrc);
  GstBuffer *buf;
  guchar *data;
  guint samples, total_samples;
  guint64 sample;
  gint bps;
  GstRingBuffer *ringbuffer;
  GstRingBufferSpec *spec;
  guint read;
  GstClockTime timestamp, duration;
  GstClock *clock;

  ringbuffer = src->ringbuffer;
  spec = &ringbuffer->spec;

  if (G_UNLIKELY (!gst_ring_buffer_is_acquired (ringbuffer)))
    goto wrong_state;

  bps = spec->bytes_per_sample;

  if ((length == 0 && bsrc->blocksize == 0) || length == -1)
    /* no length given, use the default segment size */
    length = spec->segsize;
  else
    /* make sure we round down to an integral number of samples */
    length -= length % bps;

  /* figure out the offset in the ringbuffer */
  if (G_UNLIKELY (offset != -1)) {
    sample = offset / bps;
    /* if a specific offset was given it must be the next sequential
     * offset we expect or we fail for now. */
    if (src->next_sample != -1 && sample != src->next_sample)
      goto wrong_offset;
  } else {
    /* calculate the sequentially next sample we need to read. This can jump and
     * create a DISCONT. */
    sample = gst_base_audio_src_get_offset (src);
  }

  GST_DEBUG_OBJECT (src, "reading from sample %" G_GUINT64_FORMAT, sample);

  /* get the number of samples to read */
  total_samples = samples = length / bps;

  /* FIXME, using a bufferpool would be nice here */
  buf = gst_buffer_new_and_alloc (length);
  data = GST_BUFFER_DATA (buf);

  /* keep reading until all requested samples arrived; a short read means
   * we were interrupted (e.g. state change) and must wait for PLAYING */
  do {
    read = gst_ring_buffer_read (ringbuffer, sample, data, samples);
    GST_DEBUG_OBJECT (src, "read %u of %u", read, samples);
    /* if we read all, we're done */
    if (read == samples)
      break;

    /* else something interrupted us and we wait for playing again. */
    GST_DEBUG_OBJECT (src, "wait playing");
    if (gst_base_src_wait_playing (bsrc) != GST_FLOW_OK)
      goto stopped;

    GST_DEBUG_OBJECT (src, "continue playing");

    /* read next samples */
    sample += read;
    samples -= read;
    data += read * bps;
  } while (TRUE);

  /* mark discontinuity if needed: the device produced data faster than we
   * consumed it and samples were dropped */
  if (G_UNLIKELY (sample != src->next_sample) && src->next_sample != -1) {
    GST_WARNING_OBJECT (src,
        "create DISCONT of %" G_GUINT64_FORMAT " samples at sample %"
        G_GUINT64_FORMAT, sample - src->next_sample, sample);
    GST_ELEMENT_WARNING (src, CORE, CLOCK,
        (_("Can't record audio fast enough")),
        ("dropped %" G_GUINT64_FORMAT " samples", sample - src->next_sample));
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  src->next_sample = sample + samples;

  /* get the normal timestamp to get the duration. */
  timestamp = gst_util_uint64_scale_int (sample, GST_SECOND, spec->rate);
  duration = gst_util_uint64_scale_int (src->next_sample, GST_SECOND,
      spec->rate) - timestamp;

  GST_OBJECT_LOCK (src);
  if (!(clock = GST_ELEMENT_CLOCK (src)))
    goto no_sync;

  if (clock != src->clock) {
    /* we are slaved, check how to handle this */
    switch (src->priv->slave_method) {
      case GST_BASE_AUDIO_SRC_SLAVE_RESAMPLE:
        /* not implemented, use skew algorithm. This algorithm should
         * work on the readout pointer and produces more or less samples based
         * on the clock drift */
        /* fall through */
      case GST_BASE_AUDIO_SRC_SLAVE_SKEW:
      {
        GstClockTime running_time;
        GstClockTime base_time;
        GstClockTime current_time;
        guint64 running_time_sample;
        gint running_time_segment;
        gint current_segment;
        gint segment_skew;
        gint sps;

        /* samples per segment */
        sps = ringbuffer->samples_per_seg;

        /* get the current time */
        current_time = gst_clock_get_time (clock);

        /* get the basetime */
        base_time = GST_ELEMENT_CAST (src)->base_time;

        /* get the running_time */
        running_time = current_time - base_time;

        /* the running_time converted to a sample (relative to the ringbuffer) */
        running_time_sample =
            gst_util_uint64_scale_int (running_time, spec->rate, GST_SECOND);

        /* the segmentnr corresponding to running_time, round down */
        running_time_segment = running_time_sample / sps;

        /* the segment currently read from the ringbuffer */
        current_segment = sample / sps;

        /* the skew we have between running_time and the ringbuffertime */
        segment_skew = running_time_segment - current_segment;

        GST_DEBUG_OBJECT (bsrc, "\n running_time = %" GST_TIME_FORMAT
            "\n timestamp = %" GST_TIME_FORMAT
            "\n running_time_segment = %d"
            "\n current_segment = %d"
            "\n segment_skew = %d",
            GST_TIME_ARGS (running_time), GST_TIME_ARGS (timestamp),
            running_time_segment, current_segment, segment_skew);

        /* Resync the ringbuffer if:
         * 1. We get one segment into the future.
         *    This is clearly a lie, because we can't
         *    possibly have a buffer with timestamp 1 at
         *    time 0. (unless it has time-travelled...)
         *
         * 2. We are more than the length of the ringbuffer behind.
         *    The length of the ringbuffer then gets to dictate
         *    the threshold for what is considered "too late"
         *
         * 3. If this is our first buffer.
         *    We know that we should catch up to running_time
         *    the first time we are ran. */
        if ((segment_skew < 0) ||
            (segment_skew >= ringbuffer->spec.segtotal) ||
            (current_segment == 0)) {
          gint segments_written;
          gint first_segment;
          gint last_segment;
          gint new_last_segment;
          gint segment_diff;
          gint new_first_segment;
          guint64 new_sample;

          /* we are going to say that the last segment was captured at the
             current time (running_time), minus one segment of
             creation-latency in the ringbuffer. This can be thought of as:
             The segment arrived in the ringbuffer at time X, and that means
             it was created at time X - (one segment). */
          new_last_segment = running_time_segment - 1;

          /* for better readability */
          first_segment = current_segment;

          /* get the amount of segments written from the device by now */
          segments_written = g_atomic_int_get (&ringbuffer->segdone);

          /* subtract the base to segments_written to get the number of the
             last written segment in the ringbuffer
             (one segment written = segment 0) */
          last_segment = segments_written - ringbuffer->segbase - 1;

          /* we see how many segments the ringbuffer was timeshifted */
          segment_diff = new_last_segment - last_segment;

          /* we move the first segment an equal amount */
          new_first_segment = first_segment + segment_diff;

          /* and we also move the segmentbase the same amount */
          ringbuffer->segbase -= segment_diff;

          /* we calculate the new sample value */
          new_sample = ((guint64) new_first_segment) * sps;

          /* and get the relative time to this -> our new timestamp */
          timestamp =
              gst_util_uint64_scale_int (new_sample, GST_SECOND, spec->rate);

          /* we update the next sample accordingly */
          src->next_sample = new_sample + samples;

          GST_DEBUG_OBJECT (bsrc,
              "Timeshifted the ringbuffer with %d segments: "
              "Updating the timestamp to %" GST_TIME_FORMAT ", "
              "and src->next_sample to %" G_GUINT64_FORMAT,
              segment_diff, GST_TIME_ARGS (timestamp), src->next_sample);
        }
        break;
      }
      case GST_BASE_AUDIO_SRC_SLAVE_RETIMESTAMP:
      {
        GstClockTime base_time, latency;

        /* We are slaved to another clock, take running time of the pipeline
         * clock and timestamp against it. Somebody else in the pipeline should
         * figure out the clock drift. We keep the duration we calculated
         * above. */
        timestamp = gst_clock_get_time (clock);
        base_time = GST_ELEMENT_CAST (src)->base_time;

        if (timestamp > base_time)
          timestamp -= base_time;
        else
          timestamp = 0;

        /* subtract latency */
        latency =
            gst_util_uint64_scale_int (total_samples, GST_SECOND, spec->rate);
        if (timestamp > latency)
          timestamp -= latency;
        else
          timestamp = 0;
      }
        /* NOTE(review): no break here, falls through into the NONE case
         * below; since that case only breaks, this is harmless — confirm
         * it is intentional before adding a break */
      case GST_BASE_AUDIO_SRC_SLAVE_NONE:
        break;
    }
  } else {
    GstClockTime base_time;

    /* we are not slaved, subtract base_time */
    base_time = GST_ELEMENT_CAST (src)->base_time;

    if (timestamp > base_time)
      timestamp -= base_time;
    else
      timestamp = 0;
  }

no_sync:
  GST_OBJECT_UNLOCK (src);

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_OFFSET (buf) = sample;
  GST_BUFFER_OFFSET_END (buf) = sample + samples;

  *outbuf = buf;

  return GST_FLOW_OK;

  /* ERRORS */
wrong_state:
  {
    GST_DEBUG_OBJECT (src, "ringbuffer in wrong state");
    return GST_FLOW_WRONG_STATE;
  }
wrong_offset:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, SEEK,
        (NULL), ("resource can only be operated on sequentially but offset %"
            G_GUINT64_FORMAT " was given", offset));
    return GST_FLOW_ERROR;
  }
stopped:
  {
    gst_buffer_unref (buf);
    GST_DEBUG_OBJECT (src, "ringbuffer stopped");
    return GST_FLOW_WRONG_STATE;
  }
}
/* Accumulate the blocks of a multi-block WavPack frame and, once the block
 * carrying FINAL_BLOCK arrives, timestamp the joined buffer from its sample
 * index and push it on the source pad.
 *
 * Takes ownership of @buf.  Returns GST_FLOW_OK while a frame is still
 * incomplete, otherwise the result of the downstream push. */
static GstFlowReturn
gst_wavpack_parse_push_buffer (GstWavpackParse * wvparse, GstBuffer * buf,
    WavpackHeader * header)
{
  GList *walk;

  /* account for the consumed chunk (ckSize excludes the 8 header bytes)
   * and remember our position in samples */
  wvparse->current_offset += header->ckSize + 8;
  wvparse->segment.last_stop = header->block_index;

  /* a pending newsegment event must go out before any data */
  if (wvparse->need_newsegment &&
      gst_wavpack_parse_send_newsegment (wvparse, FALSE))
    wvparse->need_newsegment = FALSE;

  /* flush any events queued while we had no segment */
  for (walk = wvparse->queued_events; walk != NULL; walk = walk->next)
    gst_pad_push_event (wvparse->srcpad, GST_EVENT (walk->data));
  if (wvparse->queued_events != NULL) {
    g_list_free (wvparse->queued_events);
    wvparse->queued_events = NULL;
  }

  /* merge this block into the frame we are accumulating; blocks belonging
   * to one frame share the same block_index */
  if (wvparse->pending_buffer != NULL
      && wvparse->pending_offset == header->block_index) {
    wvparse->pending_buffer = gst_buffer_join (wvparse->pending_buffer, buf);
  } else {
    if (wvparse->pending_buffer != NULL) {
      GST_ERROR ("Got incomplete block, dropping");
      gst_buffer_unref (wvparse->pending_buffer);
    }
    wvparse->pending_buffer = buf;
    wvparse->pending_offset = header->block_index;
  }

  /* keep accumulating until the frame is complete */
  if (!(header->flags & FINAL_BLOCK))
    return GST_FLOW_OK;

  buf = wvparse->pending_buffer;
  wvparse->pending_buffer = NULL;

  /* sample index -> time via the stream sample rate */
  GST_BUFFER_TIMESTAMP (buf) = gst_util_uint64_scale_int (header->block_index,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_DURATION (buf) = gst_util_uint64_scale_int (header->block_samples,
      GST_SECOND, wvparse->samplerate);
  GST_BUFFER_OFFSET (buf) = header->block_index;
  GST_BUFFER_OFFSET_END (buf) = header->block_index + header->block_samples;

  /* flag a discontinuity on resync or non-contiguous sample positions */
  if (wvparse->discont || wvparse->next_block_index != header->block_index) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    wvparse->discont = FALSE;
  }
  wvparse->next_block_index = header->block_index + header->block_samples;

  gst_buffer_set_caps (buf, GST_PAD_CAPS (wvparse->srcpad));

  GST_LOG_OBJECT (wvparse, "Pushing buffer with time %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  return gst_pad_push (wvparse->srcpad, buf);
}
/* One-time setup for the GME decoder: hand the complete song data gathered
 * in the adapter to libgme, post the track metadata as tags, configure
 * fading for looped tracks, send the initial segment and start the
 * streaming task.
 *
 * Returns TRUE when the player is initialized and the task was started,
 * FALSE on any failure (player cleaned up, error posted on the bus). */
static gboolean
gme_setup (GstGmeDec * gme)
{
  gme_info_t *info = NULL;
  gme_err_t gme_err = NULL;
  GstTagList *taglist;
  guint64 total_duration;
  guint64 fade_time;
  GstBuffer *buffer;
  GstSegment seg;
  GstMapInfo map;

  /* we need the whole file in the adapter and negotiated output caps */
  if (!gst_adapter_available (gme->adapter) || !gme_negotiate (gme)) {
    return FALSE;
  }

  buffer =
      gst_adapter_take_buffer (gme->adapter,
      gst_adapter_available (gme->adapter));

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  /* 32000 Hz is the output sample rate requested from libgme */
  gme_err = gme_open_data (map.data, map.size, &gme->player, 32000);
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  if (gme_err || !gme->player) {
    if (gme->player) {
      gme_delete (gme->player);
      gme->player = NULL;
    }

    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL), ("%s", gme_err));

    return FALSE;
  }

  gme_err = gme_track_info (gme->player, &info, 0);
  /* FIX: this call was previously unchecked — on failure 'info' would be
   * left NULL/undefined and dereferenced below */
  if (gme_err || !info) {
    GST_ELEMENT_ERROR (gme, STREAM, DEMUX, (NULL),
        ("%s", gme_err ? gme_err : "could not read track info"));
    gme_delete (gme->player);
    gme->player = NULL;
    return FALSE;
  }

  taglist = gst_tag_list_new_empty ();

  if (info->song && *info->song)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
        info->song, NULL);
  if (info->author && *info->author)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ARTIST,
        info->author, NULL);
  /* Prefer the name of the official soundtrack over the name of the game
   * (since this is how track numbers are derived) */
  if (info->game && *info->game)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ALBUM,
        info->game, NULL);
  if (info->comment && *info->comment)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COMMENT,
        info->comment, NULL);
  if (info->dumper && *info->dumper)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CONTACT,
        info->dumper, NULL);
  if (info->copyright && *info->copyright)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_COPYRIGHT,
        info->copyright, NULL);
  if (info->system && *info->system)
    gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER,
        info->system, NULL);

  /* play_length/loop_length are in milliseconds; looped tracks get an
   * extra 8 seconds during which the fade is applied */
  gme->total_duration = total_duration =
      gst_util_uint64_scale_int (info->play_length +
      (info->loop_length > 0 ? 8000 : 0), GST_MSECOND, 1);
  fade_time = info->loop_length > 0 ? info->play_length : 0;

  gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
      GST_TAG_DURATION, total_duration, NULL);

  gst_pad_push_event (gme->srcpad, gst_event_new_tag (taglist));

  /* FIX: the track info is allocated by libgme; release it with
   * gme_free_info() instead of g_free() to avoid mismatched allocators */
  gme_free_info (info);

#ifdef HAVE_LIBGME_ACCURACY
  /* TODO: Is it worth it to make this optional? */
  gme_enable_accuracy (gme->player, 1);
#endif
  gme_start_track (gme->player, 0);
  if (fade_time)
    gme_set_fade (gme->player, fade_time);

  gst_segment_init (&seg, GST_FORMAT_TIME);
  gst_pad_push_event (gme->srcpad, gst_event_new_segment (&seg));

  gst_pad_start_task (gme->srcpad, (GstTaskFunction) gst_gme_play,
      gme->srcpad, NULL);

  gme->initialized = TRUE;
  gme->seeking = FALSE;
  gme->seekpoint = 0;
  return gme->initialized;
}
/* Sink-pad chain function: accumulates audio in the adapter and, whenever a
 * full frame's worth of samples is available, renders one video frame with
 * the libvisual plugin into a GL FBO and pushes it downstream. Late frames
 * are skipped based on QoS feedback. Takes ownership of @buffer. */
static GstFlowReturn
gst_visual_gl_chain (GstPad * pad, GstBuffer * buffer)
{
  GstGLBuffer *outbuf = NULL;
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail;

  GST_DEBUG_OBJECT (visual, "chain function called");

  /* If we don't have an output format yet, preallocate a buffer to try and
   * set one */
  if (GST_PAD_CAPS (visual->srcpad) == NULL) {
    ret = get_buffer (visual, &outbuf);
    if (ret != GST_FLOW_OK) {
      gst_buffer_unref (buffer);
      goto beach;
    }
  }

  /* resync on DISCONT */
  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gst_adapter_clear (visual->adapter);
  }

  GST_DEBUG_OBJECT (visual,
      "Input buffer has %d samples, time=%" G_GUINT64_FORMAT,
      GST_BUFFER_SIZE (buffer) / visual->bps, GST_BUFFER_TIMESTAMP (buffer));

  /* adapter takes ownership of the input buffer */
  gst_adapter_push (visual->adapter, buffer);

  while (TRUE) {
    gboolean need_skip;
    guint64 dist, timestamp;

    GST_DEBUG_OBJECT (visual, "processing buffer");

    avail = gst_adapter_available (visual->adapter);
    GST_DEBUG_OBJECT (visual, "avail now %u", avail);

    /* we need at least VISUAL_SAMPLES samples */
    if (avail < VISUAL_SAMPLES * visual->bps)
      break;

    /* we need at least enough samples to make one frame */
    if (avail < visual->spf * visual->bps)
      break;

    /* get timestamp of the current adapter byte */
    timestamp = gst_adapter_prev_timestamp (visual->adapter, &dist);
    if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
      /* convert bytes to time */
      dist /= visual->bps;
      timestamp += gst_util_uint64_scale_int (dist, GST_SECOND, visual->rate);
    }

    if (timestamp != -1) {
      gint64 qostime;

      /* QoS is done on running time */
      qostime = gst_segment_to_running_time (&visual->segment, GST_FORMAT_TIME,
          timestamp);
      qostime += visual->duration;

      GST_OBJECT_LOCK (visual);
      /* check for QoS, don't compute buffers that are known to be late */
      need_skip = visual->earliest_time != -1 &&
          qostime <= visual->earliest_time;
      GST_OBJECT_UNLOCK (visual);

      if (need_skip) {
        GST_WARNING_OBJECT (visual,
            "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (qostime), GST_TIME_ARGS (visual->earliest_time));
        goto skip;
      }
    }

    /* alloc a buffer if we don't have one yet, this happens
     * when we pushed a buffer in this while loop before */
    if (outbuf == NULL) {
      ret = get_buffer (visual, &outbuf);
      if (ret != GST_FLOW_OK) {
        goto beach;
      }
    }

    /* render libvisual plugin to our target */
    gst_gl_display_use_fbo_v2 (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        visual->midtexture, (GLCB_V2) render_frame, (gpointer *) visual);

    /* gst video is top-down whereas opengl plan is bottom up */
    gst_gl_display_use_fbo (visual->display,
        visual->width, visual->height, visual->fbo, visual->depthbuffer,
        outbuf->texture, (GLCB) bottom_up_to_top_down,
        visual->width, visual->height, visual->midtexture, 0,
        visual->width, 0, visual->height,
        GST_GL_DISPLAY_PROJECTION_ORTHO2D, (gpointer *) visual);

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = visual->duration;

    /* push transfers ownership; clear the pointer so beach: won't unref */
    ret = gst_pad_push (visual->srcpad, GST_BUFFER (outbuf));
    outbuf = NULL;

  skip:
    GST_DEBUG_OBJECT (visual, "finished frame, flushing %u samples from input",
        visual->spf);

    /* Flush out the number of samples per frame */
    gst_adapter_flush (visual->adapter, visual->spf * visual->bps);

    /* quit the loop if something was wrong */
    if (ret != GST_FLOW_OK)
      break;
  }

beach:

  if (outbuf != NULL)
    gst_gl_buffer_unref (outbuf);

  gst_object_unref (visual);

  return ret;
}
/* Fixate width/height/pixel-aspect-ratio in @othercaps so that the display
 * aspect ratio (DAR) of the input @caps is preserved as closely as possible.
 * The decision tree tries, in order: keep everything already fixed, keep a
 * fixed height, keep a fixed width, honour a fixed output PAR, and finally
 * fixate all three freely. Returns @othercaps (modified in place).
 * Structure follows the standard videoscale fixate_caps logic. */
static GstCaps*
gst_imx_blitter_video_transform_fixate_size_caps(GstBaseTransform *transform, GstPadDirection direction, GstCaps *caps, GstCaps *othercaps)
{
	GstStructure *ins, *outs;
	GValue const *from_par, *to_par;
	GValue fpar = { 0, }, tpar = { 0, };

	ins = gst_caps_get_structure(caps, 0);
	outs = gst_caps_get_structure(othercaps, 0);

	from_par = gst_structure_get_value(ins, "pixel-aspect-ratio");
	to_par = gst_structure_get_value(outs, "pixel-aspect-ratio");

	/* If we're fixating from the sinkpad we always set the PAR and
	 * assume that missing PAR on the sinkpad means 1/1 and
	 * missing PAR on the srcpad means undefined
	 */
	if (direction == GST_PAD_SINK)
	{
		if (!from_par)
		{
			g_value_init(&fpar, GST_TYPE_FRACTION);
			gst_value_set_fraction(&fpar, 1, 1);
			from_par = &fpar;
		}
		if (!to_par)
		{
			g_value_init(&tpar, GST_TYPE_FRACTION_RANGE);
			gst_value_set_fraction_range_full(&tpar, 1, G_MAXINT, G_MAXINT, 1);
			to_par = &tpar;
		}
	}
	else
	{
		if (!to_par)
		{
			g_value_init(&tpar, GST_TYPE_FRACTION);
			gst_value_set_fraction(&tpar, 1, 1);
			to_par = &tpar;

			gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL);
		}
		if (!from_par)
		{
			g_value_init(&fpar, GST_TYPE_FRACTION);
			gst_value_set_fraction (&fpar, 1, 1);
			from_par = &fpar;
		}
	}

	/* we have both PAR but they might not be fixated */
	{
		gint from_w, from_h, from_par_n, from_par_d, to_par_n, to_par_d;
		gint w = 0, h = 0;
		gint from_dar_n, from_dar_d;
		gint num, den;

		/* from_par should be fixed */
		g_return_val_if_fail(gst_value_is_fixed(from_par), othercaps);

		from_par_n = gst_value_get_fraction_numerator(from_par);
		from_par_d = gst_value_get_fraction_denominator(from_par);

		gst_structure_get_int(ins, "width", &from_w);
		gst_structure_get_int(ins, "height", &from_h);

		gst_structure_get_int(outs, "width", &w);
		gst_structure_get_int(outs, "height", &h);

		/* if both width and height are already fixed, we can't do anything
		 * about it anymore */
		if (w && h)
		{
			guint n, d;

			GST_DEBUG_OBJECT(transform, "dimensions already set to %dx%d, not fixating", w, h);

			if (!gst_value_is_fixed(to_par))
			{
				/* derive the output PAR from the fixed output size and the input DAR */
				if (gst_video_calculate_display_ratio(&n, &d, from_w, from_h, from_par_n, from_par_d, w, h))
				{
					GST_DEBUG_OBJECT(transform, "fixating to_par to %dx%d", n, d);
					if (gst_structure_has_field(outs, "pixel-aspect-ratio"))
						gst_structure_fixate_field_nearest_fraction(outs, "pixel-aspect-ratio", n, d);
					else if (n != d)
						gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, n, d, NULL);
				}
			}

			goto done;
		}

		/* Calculate input DAR */
		if (!gst_util_fraction_multiply(from_w, from_h, from_par_n, from_par_d, &from_dar_n, &from_dar_d))
		{
			GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
			goto done;
		}

		GST_DEBUG_OBJECT(transform, "Input DAR is %d/%d", from_dar_n, from_dar_d);

		/* If either width or height are fixed there's not much we
		 * can do either except choosing a height or width and PAR
		 * that matches the DAR as good as possible
		 */
		if (h)
		{
			GstStructure *tmp;
			gint set_w, set_par_n, set_par_d;

			GST_DEBUG_OBJECT(transform, "height is fixed (%d)", h);

			/* If the PAR is fixed too, there's not much to do
			 * except choosing the width that is nearest to the
			 * width with the same DAR */
			if (gst_value_is_fixed(to_par))
			{
				to_par_n = gst_value_get_fraction_numerator(to_par);
				to_par_d = gst_value_get_fraction_denominator(to_par);

				GST_DEBUG_OBJECT(transform, "PAR is fixed %d/%d", to_par_n, to_par_d);

				if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, to_par_d, to_par_n, &num, &den))
				{
					GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
					goto done;
				}

				w = (guint) gst_util_uint64_scale_int(h, num, den);
				gst_structure_fixate_field_nearest_int(outs, "width", w);

				goto done;
			}

			/* The PAR is not fixed and it's quite likely that we can set
			 * an arbitrary PAR. */

			/* Check if we can keep the input width */
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "width", from_w);
			gst_structure_get_int(tmp, "width", &set_w);

			/* Might have failed but try to keep the DAR nonetheless by
			 * adjusting the PAR */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, h, set_w, &to_par_n, &to_par_d))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				gst_structure_free(tmp);
				goto done;
			}

			if (!gst_structure_has_field(tmp, "pixel-aspect-ratio"))
				gst_structure_set_value(tmp, "pixel-aspect-ratio", to_par);
			gst_structure_fixate_field_nearest_fraction(tmp, "pixel-aspect-ratio", to_par_n, to_par_d);
			gst_structure_get_fraction(tmp, "pixel-aspect-ratio", &set_par_n, &set_par_d);
			gst_structure_free(tmp);

			/* Check if the adjusted PAR is accepted */
			if (set_par_n == to_par_n && set_par_d == to_par_d)
			{
				if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
					gst_structure_set(outs, "width", G_TYPE_INT, set_w, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
				goto done;
			}

			/* Otherwise scale the width to the new PAR and check if the
			 * adjusted with is accepted. If all that fails we can't keep
			 * the DAR */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, set_par_d, set_par_n, &num, &den))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				goto done;
			}

			w = (guint) gst_util_uint64_scale_int(h, num, den);
			gst_structure_fixate_field_nearest_int(outs, "width", w);
			if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
				gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);

			goto done;
		}
		else if (w)
		{
			GstStructure *tmp;
			gint set_h, set_par_n, set_par_d;

			GST_DEBUG_OBJECT(transform, "width is fixed (%d)", w);

			/* If the PAR is fixed too, there's not much to do
			 * except choosing the height that is nearest to the
			 * height with the same DAR */
			if (gst_value_is_fixed(to_par))
			{
				to_par_n = gst_value_get_fraction_numerator(to_par);
				to_par_d = gst_value_get_fraction_denominator(to_par);

				GST_DEBUG_OBJECT(transform, "PAR is fixed %d/%d", to_par_n, to_par_d);

				if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, to_par_d, to_par_n, &num, &den))
				{
					GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
					goto done;
				}

				h = (guint) gst_util_uint64_scale_int(w, den, num);
				gst_structure_fixate_field_nearest_int(outs, "height", h);

				goto done;
			}

			/* The PAR is not fixed and it's quite likely that we can set
			 * an arbitrary PAR. */

			/* Check if we can keep the input height */
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "height", from_h);
			gst_structure_get_int(tmp, "height", &set_h);

			/* Might have failed but try to keep the DAR nonetheless by
			 * adjusting the PAR */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, set_h, w, &to_par_n, &to_par_d))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				gst_structure_free(tmp);
				goto done;
			}

			if (!gst_structure_has_field(tmp, "pixel-aspect-ratio"))
				gst_structure_set_value(tmp, "pixel-aspect-ratio", to_par);
			gst_structure_fixate_field_nearest_fraction(tmp, "pixel-aspect-ratio", to_par_n, to_par_d);
			gst_structure_get_fraction(tmp, "pixel-aspect-ratio", &set_par_n, &set_par_d);
			gst_structure_free(tmp);

			/* Check if the adjusted PAR is accepted */
			if (set_par_n == to_par_n && set_par_d == to_par_d)
			{
				if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
					gst_structure_set(outs, "height", G_TYPE_INT, set_h, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
				goto done;
			}

			/* Otherwise scale the height to the new PAR and check if the
			 * adjusted with is accepted. If all that fails we can't keep
			 * the DAR */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, set_par_d, set_par_n, &num, &den))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				goto done;
			}

			h = (guint) gst_util_uint64_scale_int(w, den, num);
			gst_structure_fixate_field_nearest_int(outs, "height", h);
			if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
				gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);

			goto done;
		}
		else if (gst_value_is_fixed(to_par))
		{
			GstStructure *tmp;
			gint set_h, set_w, f_h, f_w;

			to_par_n = gst_value_get_fraction_numerator(to_par);
			to_par_d = gst_value_get_fraction_denominator(to_par);

			/* Calculate scale factor for the PAR change */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, to_par_n, to_par_d, &num, &den))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				goto done;
			}

			/* Try to keep the input height (because of interlacing) */
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "height", from_h);
			gst_structure_get_int(tmp, "height", &set_h);

			/* This might have failed but try to scale the width
			 * to keep the DAR nonetheless */
			w = (guint) gst_util_uint64_scale_int(set_h, num, den);
			gst_structure_fixate_field_nearest_int(tmp, "width", w);
			gst_structure_get_int(tmp, "width", &set_w);
			gst_structure_free(tmp);

			/* We kept the DAR and the height is nearest to the original height */
			if (set_w == w)
			{
				gst_structure_set(outs, "width", G_TYPE_INT, set_w, "height", G_TYPE_INT, set_h, NULL);
				goto done;
			}

			f_h = set_h;
			f_w = set_w;

			/* If the former failed, try to keep the input width at least */
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "width", from_w);
			gst_structure_get_int(tmp, "width", &set_w);

			/* This might have failed but try to scale the width
			 * to keep the DAR nonetheless */
			h = (guint) gst_util_uint64_scale_int(set_w, den, num);
			gst_structure_fixate_field_nearest_int(tmp, "height", h);
			gst_structure_get_int(tmp, "height", &set_h);
			gst_structure_free(tmp);

			/* We kept the DAR and the width is nearest to the original width */
			if (set_h == h)
			{
				gst_structure_set(outs, "width", G_TYPE_INT, set_w, "height", G_TYPE_INT, set_h, NULL);
				goto done;
			}

			/* If all this failed, keep the height that was nearest to the orignal
			 * height and the nearest possible width. This changes the DAR but
			 * there's not much else to do here. */
			gst_structure_set(outs, "width", G_TYPE_INT, f_w, "height", G_TYPE_INT, f_h, NULL);
			goto done;
		}
		else
		{
			GstStructure *tmp;
			gint set_h, set_w, set_par_n, set_par_d, tmp2;

			/* width, height and PAR are not fixed but passthrough is not possible */

			/* First try to keep the height and width as good as possible
			 * and scale PAR */
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "height", from_h);
			gst_structure_get_int(tmp, "height", &set_h);
			gst_structure_fixate_field_nearest_int(tmp, "width", from_w);
			gst_structure_get_int(tmp, "width", &set_w);

			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, set_h, set_w, &to_par_n, &to_par_d))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				goto done;
			}

			if (!gst_structure_has_field(tmp, "pixel-aspect-ratio"))
				gst_structure_set_value(tmp, "pixel-aspect-ratio", to_par);
			gst_structure_fixate_field_nearest_fraction(tmp, "pixel-aspect-ratio", to_par_n, to_par_d);
			gst_structure_get_fraction(tmp, "pixel-aspect-ratio", &set_par_n, &set_par_d);
			gst_structure_free(tmp);

			if (set_par_n == to_par_n && set_par_d == to_par_d)
			{
				gst_structure_set(outs, "width", G_TYPE_INT, set_w, "height", G_TYPE_INT, set_h, NULL);
				if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
					gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
				goto done;
			}

			/* Otherwise try to scale width to keep the DAR with the set
			 * PAR and height */
			if (!gst_util_fraction_multiply(from_dar_n, from_dar_d, set_par_d, set_par_n, &num, &den))
			{
				GST_ELEMENT_ERROR(transform, CORE, NEGOTIATION, (NULL), ("Error calculating the output scaled size - integer overflow"));
				goto done;
			}

			w = (guint) gst_util_uint64_scale_int(set_h, num, den);
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "width", w);
			gst_structure_get_int(tmp, "width", &tmp2);
			gst_structure_free(tmp);

			if (tmp2 == w)
			{
				gst_structure_set(outs, "width", G_TYPE_INT, tmp2, "height", G_TYPE_INT, set_h, NULL);
				if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
					gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
				goto done;
			}

			/* ... or try the same with the height */
			h = (guint) gst_util_uint64_scale_int(set_w, den, num);
			tmp = gst_structure_copy(outs);
			gst_structure_fixate_field_nearest_int(tmp, "height", h);
			gst_structure_get_int(tmp, "height", &tmp2);
			gst_structure_free(tmp);

			if (tmp2 == h)
			{
				gst_structure_set(outs, "width", G_TYPE_INT, set_w, "height", G_TYPE_INT, tmp2, NULL);
				if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
					gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
				goto done;
			}

			/* If all fails we can't keep the DAR and take the nearest values
			 * for everything from the first try */
			gst_structure_set(outs, "width", G_TYPE_INT, set_w, "height", G_TYPE_INT, set_h, NULL);
			if (gst_structure_has_field(outs, "pixel-aspect-ratio") || set_par_n != set_par_d)
				gst_structure_set(outs, "pixel-aspect-ratio", GST_TYPE_FRACTION, set_par_n, set_par_d, NULL);
		}
	}

done:
	gst_imx_blitter_video_transform_fixate_format_caps(transform, caps, othercaps);

	GST_DEBUG_OBJECT(transform, "fixated othercaps to %" GST_PTR_FORMAT, (gpointer)othercaps);

	/* only unset the temporaries we actually initialized above */
	if (from_par == &fpar)
		g_value_unset(&fpar);
	if (to_par == &tpar)
		g_value_unset(&tpar);

	return othercaps;
}
/* Handle one video frame: advance the SPU state to the frame's running time,
 * optionally render the subpicture overlay onto (a writable copy of) the
 * frame, keep a reference/copy around for still-frame handling, and push the
 * result downstream. A NULL @buf means "repeat the reference frame with an
 * advanced timestamp" (used during stills). Takes ownership of @buf. */
static GstFlowReturn
dvdspu_handle_vid_buffer (GstDVDSpu * dvdspu, GstBuffer * buf)
{
  GstClockTime new_ts;
  GstFlowReturn ret;
  gboolean using_ref = FALSE;

  DVD_SPU_LOCK (dvdspu);

  if (buf == NULL) {
    GstClockTime next_ts = dvdspu->video_seg.position;

    /* advance by one frame duration (fps_d/fps_n seconds) */
    next_ts += gst_util_uint64_scale_int (GST_SECOND,
        dvdspu->spu_state.info.fps_d, dvdspu->spu_state.info.fps_n);
    /* NULL buffer was passed - use the reference frame and update the timestamp,
     * or else there's nothing to draw, and just return GST_FLOW_OK */
    if (dvdspu->ref_frame == NULL) {
      dvdspu->video_seg.position = next_ts;
      goto no_ref_frame;
    }

    buf = gst_buffer_copy (dvdspu->ref_frame);

#if 0
    g_print ("Duping frame %" GST_TIME_FORMAT " with new TS %" GST_TIME_FORMAT
        "\n", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (next_ts));
#endif

    GST_BUFFER_TIMESTAMP (buf) = next_ts;
    using_ref = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    dvdspu->video_seg.position = GST_BUFFER_TIMESTAMP (buf);
  }

  new_ts = gst_segment_to_running_time (&dvdspu->video_seg, GST_FORMAT_TIME,
      dvdspu->video_seg.position);

#if 0
  g_print ("TS %" GST_TIME_FORMAT " running: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (dvdspu->video_seg.position), GST_TIME_ARGS (new_ts));
#endif

  gst_dvd_spu_advance_spu (dvdspu, new_ts);

  /* If we have an active SPU command set, we store a copy of the frame in case
   * we hit a still and need to draw on it. Otherwise, a reference is
   * sufficient in case we later encounter a still */
  if ((dvdspu->spu_state.flags & SPU_STATE_FORCED_DSP) ||
      ((dvdspu->spu_state.flags & SPU_STATE_FORCED_ONLY) == 0 &&
          (dvdspu->spu_state.flags & SPU_STATE_DISPLAY))) {
    if (using_ref == FALSE) {
      GstBuffer *copy;

      /* Take a copy in case we hit a still frame and need the pristine
       * frame around */
      copy = gst_buffer_copy (buf);
      gst_buffer_replace (&dvdspu->ref_frame, copy);
      gst_buffer_unref (copy);
    }

    /* Render the SPU overlay onto the buffer */
    buf = gst_buffer_make_writable (buf);
    gstspu_render (dvdspu, buf);
  } else {
    if (using_ref == FALSE) {
      /* Not going to draw anything on this frame, just store a reference
       * in case we hit a still frame and need it */
      gst_buffer_replace (&dvdspu->ref_frame, buf);
    }
  }

  if (dvdspu->spu_state.flags & SPU_STATE_STILL_FRAME) {
    GST_DEBUG_OBJECT (dvdspu, "Outputting buffer with TS %" GST_TIME_FORMAT
        "from chain while in still",
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
  }

  /* drop the lock before pushing downstream to avoid holding it across
   * a potentially blocking pad push */
  DVD_SPU_UNLOCK (dvdspu);

  /* just push out the incoming buffer without touching it */
  ret = gst_pad_push (dvdspu->srcpad, buf);

  return ret;

no_ref_frame:
  DVD_SPU_UNLOCK (dvdspu);
  return GST_FLOW_OK;
}
/* Source-pad query handler. LATENCY queries are first answered by upstream
 * and then our own buffering latency is added on top; CUSTOM queries succeed
 * when the query structure carries this element's name; anything else is
 * forwarded to the upstream peer. */
static gboolean
gst_visual_gl_src_query (GstPad * pad, GstQuery * query)
{
  GstVisualGL *visual = GST_VISUAL_GL (gst_pad_get_parent (pad));
  gboolean ret;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      GstClockTime upstream_min, upstream_max, added_latency;
      gboolean upstream_live;
      guint samples_needed;

      /* We need to send the query upstream and add the returned latency to our
       * own */
      ret = gst_pad_peer_query (visual->sinkpad, query);
      if (!ret)
        break;

      gst_query_parse_latency (query, &upstream_live, &upstream_min,
          &upstream_max);

      GST_DEBUG_OBJECT (visual, "Peer latency: min %"
          GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (upstream_min), GST_TIME_ARGS (upstream_max));

      /* the max samples we must buffer buffer */
      samples_needed = MAX (VISUAL_SAMPLES, visual->spf);
      added_latency =
          gst_util_uint64_scale_int (samples_needed, GST_SECOND, visual->rate);

      GST_DEBUG_OBJECT (visual, "Our latency: %" GST_TIME_FORMAT,
          GST_TIME_ARGS (added_latency));

      /* we add some latency but only if we need to buffer more than what
       * upstream gives us */
      upstream_min += added_latency;
      if (upstream_max != -1)
        upstream_max += added_latency;

      GST_DEBUG_OBJECT (visual, "Calculated total latency : min %"
          GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (upstream_min), GST_TIME_ARGS (upstream_max));

      gst_query_set_latency (query, TRUE, upstream_min, upstream_max);
      break;
    }
    case GST_QUERY_CUSTOM:{
      GstStructure *structure = gst_query_get_structure (query);
      gchar *element_name = gst_element_get_name (visual);

      /* succeed only when the custom structure is addressed to us by name */
      ret = (g_strcmp0 (element_name, gst_structure_get_name (structure)) == 0);
      g_free (element_name);

      if (!ret)
        ret = gst_pad_query_default (pad, query);
      break;
    }
    default:
      ret = gst_pad_peer_query (visual->sinkpad, query);
      break;
  }

  gst_object_unref (visual);

  return ret;
}
/* Convert a byte count into a number of whole video frames by dividing by
 * the per-picture byte size from @state, using the overflow-safe GStreamer
 * scaling helper. NOTE(review): assumes state->bytes_per_picture is non-zero
 * — confirm callers guarantee that before header/caps setup completes. */
guint64
gst_base_video_convert_bytes_to_frames (GstVideoState * state, guint64 bytes)
{
  return gst_util_uint64_scale_int (bytes, 1, state->bytes_per_picture);
}
/* Negotiate input and output framerates from the caps on both pads.
 * Records the from/to framerates, rebases base_ts so already-produced frames
 * keep their timestamps across a rate change, resets the output frame count,
 * and recomputes wanted_diff (the ideal output frame duration). Always drops
 * any cached previous buffer since caps (e.g. dimensions) may have changed.
 * Returns FALSE if either caps lack a framerate field. */
static gboolean
gst_video_rate_setcaps (GstBaseTransform * trans, GstCaps * in_caps,
    GstCaps * out_caps)
{
  /* FIX: 'videorate' was redundantly assigned GST_VIDEO_RATE (trans) twice
   * (once at declaration, once in the body); keep a single assignment */
  GstVideoRate *videorate = GST_VIDEO_RATE (trans);
  GstStructure *structure;
  gboolean ret = TRUE;
  gint rate_numerator, rate_denominator;

  GST_DEBUG_OBJECT (trans,
      "setcaps called in: %" GST_PTR_FORMAT " out: %" GST_PTR_FORMAT,
      in_caps, out_caps);

  structure = gst_caps_get_structure (in_caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  videorate->from_rate_numerator = rate_numerator;
  videorate->from_rate_denominator = rate_denominator;

  structure = gst_caps_get_structure (out_caps, 0);
  if (!gst_structure_get_fraction (structure, "framerate",
          &rate_numerator, &rate_denominator))
    goto no_framerate;

  /* out_frame_count is scaled by the frame rate caps when calculating next_ts.
   * when the frame rate caps change, we must update base_ts and reset
   * out_frame_count */
  if (videorate->to_rate_numerator) {
    videorate->base_ts +=
        gst_util_uint64_scale (videorate->out_frame_count,
        videorate->to_rate_denominator * GST_SECOND,
        videorate->to_rate_numerator);
  }
  videorate->out_frame_count = 0;
  videorate->to_rate_numerator = rate_numerator;
  videorate->to_rate_denominator = rate_denominator;

  /* ideal duration of one output frame (0 for variable-rate output) */
  if (rate_numerator)
    videorate->wanted_diff = gst_util_uint64_scale_int (GST_SECOND,
        rate_denominator, rate_numerator);
  else
    videorate->wanted_diff = 0;

done:
  /* After a setcaps, our caps may have changed. In that case, we can't use
   * the old buffer, if there was one (it might have different dimensions) */
  GST_DEBUG_OBJECT (videorate, "swapping old buffers");
  gst_video_rate_swap_prev (videorate, NULL, GST_CLOCK_TIME_NONE);
  videorate->last_ts = GST_CLOCK_TIME_NONE;
  videorate->average = 0;

  return ret;

no_framerate:
  {
    GST_DEBUG_OBJECT (videorate, "no framerate specified");
    ret = FALSE;
    goto done;
  }
}
/* Chain function: encode one raw video frame with the smoke codec and push
 * the compressed buffer downstream. On the very first buffer a codec-id
 * header buffer is pushed first. Takes ownership of @buf. */
static GstFlowReturn
gst_smokeenc_chain (GstPad * pad, GstBuffer * buf)
{
  GstSmokeEnc *smokeenc;
  guchar *data, *outdata;
  gulong size;
  gint outsize;
  guint encsize;
  GstBuffer *outbuf;
  SmokeCodecFlags flags;
  GstFlowReturn ret;

  smokeenc = GST_SMOKEENC (GST_OBJECT_PARENT (pad));

  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  GST_LOG_OBJECT (smokeenc, "got buffer of %lu bytes", size);

  if (smokeenc->need_header) {
    /* push the codec-id header before the first frame */
    outbuf = gst_buffer_new_and_alloc (256);
    outdata = GST_BUFFER_DATA (outbuf);

    GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);

    smokecodec_encode_id (smokeenc->info, outdata, &encsize);

    GST_BUFFER_SIZE (outbuf) = encsize;

    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (smokeenc->srcpad));
    ret = gst_pad_push (smokeenc->srcpad, outbuf);
    if (ret != GST_FLOW_OK) {
      /* FIX: the input buffer was leaked on this error path before */
      gst_buffer_unref (buf);
      goto done;
    }

    smokeenc->need_header = FALSE;
  }

  /* worst-case output size: uncompressed RGB frame */
  encsize = outsize = smokeenc->width * smokeenc->height * 3;
  outbuf = gst_buffer_new_and_alloc (outsize);
  outdata = GST_BUFFER_DATA (outbuf);

  GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (GST_SECOND, smokeenc->fps_denom,
      smokeenc->fps_num);
  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (smokeenc->srcpad));

  flags = 0;
  /* force a keyframe every 'keyframe' frames */
  if ((smokeenc->frame % smokeenc->keyframe) == 0) {
    flags |= SMOKECODEC_KEYFRAME;
  }

  smokecodec_set_quality (smokeenc->info, smokeenc->min_quality,
      smokeenc->max_quality);
  smokecodec_set_threshold (smokeenc->info, smokeenc->threshold);
  smokecodec_encode (smokeenc->info, data, flags, outdata, &encsize);
  gst_buffer_unref (buf);

  GST_BUFFER_SIZE (outbuf) = encsize;
  GST_BUFFER_OFFSET (outbuf) = smokeenc->frame;
  GST_BUFFER_OFFSET_END (outbuf) = smokeenc->frame + 1;

  ret = gst_pad_push (smokeenc->srcpad, outbuf);

  smokeenc->frame++;

done:
  return ret;
}
/* Source-pad query handler: answers POSITION and DURATION in TIME or
 * DEFAULT (samples) format once the headers (samplerate) have been read,
 * and SEEKING based on whether we operate in pull mode (adapter == NULL).
 * Everything else is handled by the default query handler. */
static gboolean
gst_wavpack_parse_src_query (GstPad * pad, GstQuery * query)
{
  GstWavpackParse *parse = GST_WAVPACK_PARSE (gst_pad_get_parent (pad));
  GstFormat format;
  gboolean ret = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:{
      gint64 cur;
      guint rate;

      /* snapshot shared state under the object lock */
      GST_OBJECT_LOCK (parse);
      cur = parse->segment.last_stop;
      rate = parse->samplerate;
      GST_OBJECT_UNLOCK (parse);

      if (rate == 0) {
        GST_DEBUG_OBJECT (parse, "haven't read header yet");
        break;
      }

      gst_query_parse_position (query, &format, NULL);

      switch (format) {
        case GST_FORMAT_TIME:
          /* samples -> nanoseconds */
          cur = gst_util_uint64_scale_int (cur, GST_SECOND, rate);
          gst_query_set_position (query, GST_FORMAT_TIME, cur);
          ret = TRUE;
          break;
        case GST_FORMAT_DEFAULT:
          gst_query_set_position (query, GST_FORMAT_DEFAULT, cur);
          ret = TRUE;
          break;
        default:
          GST_DEBUG_OBJECT (parse, "cannot handle position query in "
              "%s format. Forwarding upstream.", gst_format_get_name (format));
          ret = gst_pad_query_default (pad, query);
          break;
      }
      break;
    }
    case GST_QUERY_DURATION:{
      gint64 len;
      guint rate;

      /* snapshot shared state under the object lock */
      GST_OBJECT_LOCK (parse);
      rate = parse->samplerate;
      len = parse->total_samples;
      GST_OBJECT_UNLOCK (parse);

      if (rate == 0) {
        GST_DEBUG_OBJECT (parse, "haven't read header yet");
        break;
      }

      gst_query_parse_duration (query, &format, NULL);

      switch (format) {
        case GST_FORMAT_TIME:
          /* -1 means unknown; only convert a known sample count to time */
          if (len != G_GINT64_CONSTANT (-1))
            len = gst_util_uint64_scale_int (len, GST_SECOND, rate);
          gst_query_set_duration (query, GST_FORMAT_TIME, len);
          ret = TRUE;
          break;
        case GST_FORMAT_DEFAULT:
          gst_query_set_duration (query, GST_FORMAT_DEFAULT, len);
          ret = TRUE;
          break;
        default:
          GST_DEBUG_OBJECT (parse, "cannot handle duration query in "
              "%s format. Forwarding upstream.", gst_format_get_name (format));
          ret = gst_pad_query_default (pad, query);
          break;
      }
      break;
    }
    case GST_QUERY_SEEKING:{
      gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
      if (format == GST_FORMAT_TIME || format == GST_FORMAT_DEFAULT) {
        gboolean seekable;
        gint64 duration = -1;

        /* only fails if we didn't read the headers yet and can't say
         * anything about our seeking capabilities */
        if (!gst_pad_query_duration (pad, &format, &duration))
          break;

        /* can't seek in streaming mode yet */
        GST_OBJECT_LOCK (parse);
        seekable = (parse->adapter == NULL);
        GST_OBJECT_UNLOCK (parse);

        gst_query_set_seeking (query, format, seekable, 0, duration);
        ret = TRUE;
      }
      break;
    }
    default:{
      ret = gst_pad_query_default (pad, query);
      break;
    }
  }

  gst_object_unref (parse);
  return ret;
}
/* Create one output frame for the inter-video source: take the buffer most
 * recently posted to the shared surface (dropping it after it has been
 * repeated 30 times), or synthesize a black frame if none is available, then
 * stamp timestamp/duration/offset from the running frame counter. */
static GstFlowReturn
gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src);
  GstBuffer *buffer;

  GST_DEBUG_OBJECT (intervideosrc, "create");

  buffer = NULL;

  /* the surface is shared with the sink element; guard with its mutex */
  g_mutex_lock (&intervideosrc->surface->mutex);
  if (intervideosrc->surface->video_buffer) {
    buffer = gst_buffer_ref (intervideosrc->surface->video_buffer);
    intervideosrc->surface->video_buffer_count++;
    /* stop repeating the same frame after 30 outputs */
    if (intervideosrc->surface->video_buffer_count >= 30) {
      gst_buffer_unref (intervideosrc->surface->video_buffer);
      intervideosrc->surface->video_buffer = NULL;
    }
  }
  g_mutex_unlock (&intervideosrc->surface->mutex);

  if (buffer == NULL) {
    GstMapInfo map;

    /* no shared frame available: make a black frame (NOTE(review): the
     * 16/128 fill values assume planar YUV output — confirm against the
     * negotiated format) */
    buffer =
        gst_buffer_new_and_alloc (GST_VIDEO_INFO_SIZE (&intervideosrc->info));
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    memset (map.data, 16,
        GST_VIDEO_INFO_COMP_STRIDE (&intervideosrc->info, 0) *
        GST_VIDEO_INFO_COMP_HEIGHT (&intervideosrc->info, 0));
    memset (map.data + GST_VIDEO_INFO_COMP_OFFSET (&intervideosrc->info, 1),
        128,
        2 * GST_VIDEO_INFO_COMP_STRIDE (&intervideosrc->info, 1) *
        GST_VIDEO_INFO_COMP_HEIGHT (&intervideosrc->info, 1));
    gst_buffer_unmap (buffer, &map);
  }

  buffer = gst_buffer_make_writable (buffer);

  /* timestamp of frame n = n * (fps_d/fps_n) seconds; duration is the
   * difference to frame n+1 so rounding errors don't accumulate */
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (GST_SECOND * intervideosrc->n_frames,
      GST_VIDEO_INFO_FPS_D (&intervideosrc->info),
      GST_VIDEO_INFO_FPS_N (&intervideosrc->info));
  GST_DEBUG_OBJECT (intervideosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (GST_SECOND * (intervideosrc->n_frames + 1),
      GST_VIDEO_INFO_FPS_D (&intervideosrc->info),
      GST_VIDEO_INFO_FPS_N (&intervideosrc->info)) -
      GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = intervideosrc->n_frames;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  /* only the very first frame is a discontinuity */
  if (intervideosrc->n_frames == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  intervideosrc->n_frames++;

  *buf = buffer;

  return GST_FLOW_OK;
}
/* Handle a seek event in pull mode: convert the requested positions to
 * samples, run the flush-start / stream-lock / flush-stop protocol, scan the
 * file for the chunk containing the target sample, install the new segment
 * and restart the streaming task. Returns TRUE if the target was found.
 * Seeking in push/streaming mode (adapter != NULL) is not supported. */
static gboolean
gst_wavpack_parse_handle_seek_event (GstWavpackParse * wvparse,
    GstEvent * event)
{
  GstSeekFlags seek_flags;
  GstSeekType start_type;
  GstSeekType stop_type;
  GstSegment segment;
  GstFormat format;
  gboolean only_update;
  gboolean flush, ret;
  gdouble speed;
  gint64 stop;
  gint64 start;                 /* sample we want to seek to */
  gint64 byte_offset;           /* byte offset the chunk we seek to starts at */
  gint64 chunk_start;           /* first sample in chunk we seek to */
  guint rate;
  gint64 last_stop;

  if (wvparse->adapter) {
    GST_DEBUG_OBJECT (wvparse, "seeking in streaming mode not implemented yet");
    return FALSE;
  }

  gst_event_parse_seek (event, &speed, &format, &seek_flags, &start_type,
      &start, &stop_type, &stop);

  if (format != GST_FORMAT_DEFAULT && format != GST_FORMAT_TIME) {
    GST_DEBUG ("seeking is only supported in TIME or DEFAULT format");
    return FALSE;
  }

  if (speed < 0.0) {
    GST_DEBUG ("only forward playback supported, rate %f not allowed", speed);
    return FALSE;
  }

  /* NOTE(review): the object lock taken here stays held across the pad
   * event pushes and the stream lock below — confirm this cannot deadlock
   * against other code paths taking these locks in a different order */
  GST_OBJECT_LOCK (wvparse);

  rate = wvparse->samplerate;
  if (rate == 0) {
    GST_OBJECT_UNLOCK (wvparse);
    GST_DEBUG ("haven't read header yet");
    return FALSE;
  }

  /* figure out the last position we need to play. If it's configured (stop !=
   * -1), use that, else we play until the total duration of the file */
  if (stop == -1)
    stop = wvparse->segment.duration;

  /* convert from time to samples if necessary */
  if (format == GST_FORMAT_TIME) {
    if (start_type != GST_SEEK_TYPE_NONE)
      start = gst_util_uint64_scale_int (start, rate, GST_SECOND);
    if (stop_type != GST_SEEK_TYPE_NONE)
      stop = gst_util_uint64_scale_int (stop, rate, GST_SECOND);
  }

  if (start < 0) {
    GST_OBJECT_UNLOCK (wvparse);
    GST_DEBUG_OBJECT (wvparse, "Invalid start sample %" G_GINT64_FORMAT, start);
    return FALSE;
  }

  flush = ((seek_flags & GST_SEEK_FLAG_FLUSH) != 0);

  /* operate on segment copy until we know the seek worked */
  segment = wvparse->segment;

  gst_segment_set_seek (&segment, speed, GST_FORMAT_DEFAULT,
      seek_flags, start_type, start, stop_type, stop, &only_update);

#if 0
  if (only_update) {
    wvparse->segment = segment;
    gst_wavpack_parse_send_newsegment (wvparse, TRUE);
    goto done;
  }
#endif

  /* unblock the streaming thread so we can take the stream lock */
  gst_pad_push_event (wvparse->sinkpad, gst_event_new_flush_start ());

  if (flush) {
    gst_pad_push_event (wvparse->srcpad, gst_event_new_flush_start ());
  } else {
    gst_pad_pause_task (wvparse->sinkpad);
  }

  GST_PAD_STREAM_LOCK (wvparse->sinkpad);

  /* Save current position */
  last_stop = wvparse->segment.last_stop;

  gst_pad_push_event (wvparse->sinkpad, gst_event_new_flush_stop ());

  if (flush) {
    gst_pad_push_event (wvparse->srcpad, gst_event_new_flush_stop ());
  }

  GST_DEBUG_OBJECT (wvparse, "Performing seek to %" GST_TIME_FORMAT
      " sample %" G_GINT64_FORMAT,
      GST_TIME_ARGS (segment.start * GST_SECOND / rate), start);

  ret = gst_wavpack_parse_scan_to_find_sample (wvparse, segment.start,
      &byte_offset, &chunk_start);

  if (ret) {
    GST_DEBUG_OBJECT (wvparse, "new offset: %" G_GINT64_FORMAT, byte_offset);
    wvparse->current_offset = byte_offset;
    /* we want to send a newsegment event with the actual seek position
     * as start, even though our first buffer might start before the
     * configured segment. We leave it up to the decoder or sink to crop
     * the output buffers accordingly */
    wvparse->segment = segment;
    wvparse->segment.last_stop = chunk_start;
    wvparse->need_newsegment = TRUE;
    wvparse->discont = (last_stop != chunk_start) ? TRUE : FALSE;

    /* if we're doing a segment seek, post a SEGMENT_START message */
    if (wvparse->segment.flags & GST_SEEK_FLAG_SEGMENT) {
      gst_element_post_message (GST_ELEMENT_CAST (wvparse),
          gst_message_new_segment_start (GST_OBJECT_CAST (wvparse),
              wvparse->segment.format, wvparse->segment.last_stop));
    }
  } else {
    GST_DEBUG_OBJECT (wvparse, "seek failed: don't know where to seek to");
  }

  GST_PAD_STREAM_UNLOCK (wvparse->sinkpad);
  GST_OBJECT_UNLOCK (wvparse);

  /* restart the streaming task from the new offset */
  gst_pad_start_task (wvparse->sinkpad,
      (GstTaskFunction) gst_wavpack_parse_loop, wvparse);

  return ret;
}
/* In-place transform for the identity element: passes @buf through
 * unchanged while performing identity's side-band duties — continuity
 * checks, statistics message, optional random drops / induced errors,
 * handoff signalling and clock synchronisation.  The buffer itself is
 * only rewritten for the datarate and single-segment options. */
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);
  gsize size;

  size = gst_buffer_get_size (buf);

  /* optional continuity checks against the previous buffer's values */
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  /* error-after counts down once per buffer and errors out exactly when
   * it reaches zero */
  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  /* randomly drop buffers with the configured probability */
  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  /* hex-dump the buffer contents when requested */
  if (identity->dump) {
    GstMapInfo info;

    gst_buffer_map (buf, &info, GST_MAP_READ);
    gst_util_dump_mem (info.data, info.size);
    gst_buffer_unmap (buf, &info);
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf, size);
  }

  /* re-timestamp according to the configured byte rate */
  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  /* sync to the clock at the buffer's running time; the clock id is
   * stored so another thread can unschedule the wait on shutdown */
  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
      GST_OBJECT_UNLOCK (identity);

      cret = gst_clock_id_wait (identity->clock_id, NULL);

      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      /* an unscheduled wait means we were woken up for shutdown */
      if (cret == GST_CLOCK_UNSCHEDULED)
        ret = GST_FLOW_EOS;
    }
    GST_OBJECT_UNLOCK (identity);
  }

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  /* in single-segment mode, rewrite timestamps as running time */
  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = runtimestamp;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping", buf,
          size);
    }
    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
/* Sink pad chain function: feeds raw PCM (16-bit integer or 32-bit float,
 * depending on float_input) to the TwoLAME encoder and pushes any produced
 * MP2 frames downstream.  Takes ownership of @buf. */
static GstFlowReturn
gst_two_lame_chain (GstPad * pad, GstBuffer * buf)
{
  GstTwoLame *twolame;
  guchar *mp3_data;
  gint mp3_buffer_size, mp3_size;
  gint64 duration;
  GstFlowReturn result;
  gint num_samples;
  guint8 *data;
  guint size;

  twolame = GST_TWO_LAME (GST_PAD_PARENT (pad));

  GST_LOG_OBJECT (twolame, "entered chain");

  if (!twolame->setup)
    goto not_setup;

  data = GST_BUFFER_DATA (buf);
  size = GST_BUFFER_SIZE (buf);

  /* total sample count across all channels: 4 bytes per float sample,
   * 2 bytes per 16-bit sample */
  if (twolame->float_input)
    num_samples = size / 4;
  else
    num_samples = size / 2;

  /* allocate space for output */
  mp3_buffer_size = 1.25 * num_samples + 16384;
  mp3_data = g_malloc (mp3_buffer_size);

  /* mono passes the same buffer as left and right channel; multi-channel
   * input uses the interleaved entry points with per-channel counts */
  if (twolame->num_channels == 1) {
    if (twolame->float_input)
      mp3_size =
          twolame_encode_buffer_float32 (twolame->glopts,
          (float *) data, (float *) data, num_samples, mp3_data,
          mp3_buffer_size);
    else
      mp3_size =
          twolame_encode_buffer (twolame->glopts,
          (short int *) data, (short int *) data, num_samples, mp3_data,
          mp3_buffer_size);
  } else {
    if (twolame->float_input)
      mp3_size =
          twolame_encode_buffer_float32_interleaved (twolame->glopts,
          (float *) data, num_samples / twolame->num_channels, mp3_data,
          mp3_buffer_size);
    else
      mp3_size =
          twolame_encode_buffer_interleaved (twolame->glopts,
          (short int *) data, num_samples / twolame->num_channels, mp3_data,
          mp3_buffer_size);
  }

  GST_LOG_OBJECT (twolame, "encoded %d bytes of audio to %d bytes of mp3",
      size, mp3_size);

  /* duration of the input data, derived from byte count and sample size */
  if (twolame->float_input)
    duration = gst_util_uint64_scale_int (size, GST_SECOND,
        4 * twolame->samplerate * twolame->num_channels);
  else
    duration = gst_util_uint64_scale_int (size, GST_SECOND,
        2 * twolame->samplerate * twolame->num_channels);

  if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE &&
      GST_BUFFER_DURATION (buf) != duration) {
    GST_DEBUG_OBJECT (twolame, "incoming buffer had incorrect duration %"
        GST_TIME_FORMAT ", outgoing buffer will have correct duration %"
        GST_TIME_FORMAT,
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (duration));
  }

  /* accumulate timestamp/offset/duration while the encoder buffers data
   * internally; the state is reset once a frame is actually pushed */
  if (twolame->last_ts == GST_CLOCK_TIME_NONE) {
    twolame->last_ts = GST_BUFFER_TIMESTAMP (buf);
    twolame->last_offs = GST_BUFFER_OFFSET (buf);
    twolame->last_duration = duration;
  } else {
    twolame->last_duration += duration;
  }

  gst_buffer_unref (buf);

  if (mp3_size < 0) {
    g_warning ("error %d", mp3_size);
  }

  if (mp3_size > 0) {
    GstBuffer *outbuf;

    /* wrap the encoded data zero-copy; setting MALLOCDATA makes the
     * buffer own (and eventually free) mp3_data */
    outbuf = gst_buffer_new ();
    GST_BUFFER_DATA (outbuf) = mp3_data;
    GST_BUFFER_MALLOCDATA (outbuf) = mp3_data;
    GST_BUFFER_SIZE (outbuf) = mp3_size;
    GST_BUFFER_TIMESTAMP (outbuf) = twolame->last_ts;
    GST_BUFFER_OFFSET (outbuf) = twolame->last_offs;
    GST_BUFFER_DURATION (outbuf) = twolame->last_duration;
    gst_buffer_set_caps (outbuf, GST_PAD_CAPS (twolame->srcpad));

    result = gst_pad_push (twolame->srcpad, outbuf);
    twolame->last_flow = result;
    if (result != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (twolame, "flow return: %s",
          gst_flow_get_name (result));
    }

    /* remember where EOS would land, for draining */
    if (GST_CLOCK_TIME_IS_VALID (twolame->last_ts))
      twolame->eos_ts = twolame->last_ts + twolame->last_duration;
    else
      twolame->eos_ts = GST_CLOCK_TIME_NONE;
    twolame->last_ts = GST_CLOCK_TIME_NONE;
  } else {
    /* nothing produced this round; discard the unused output buffer */
    g_free (mp3_data);
    result = GST_FLOW_OK;
  }

  return result;

  /* ERRORS */
not_setup:
  {
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (twolame, CORE, NEGOTIATION, (NULL),
        ("encoder not initialized (input is not audio?)"));
    return GST_FLOW_ERROR;
  }
}
/* Convert a value between TIME, DEFAULT (samples) and BYTES formats on the
 * decoder pads, using the negotiated rate/channels/width.  BYTES is not
 * supported on the sink pad (compressed input has no fixed byterate).
 * Takes — and always releases — a ref on the pad's parent. */
static gboolean
vorbis_dec_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstVorbisDec *dec;
  guint64 scale = 1;

  /* identical formats: trivial copy, no parent ref needed */
  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  dec = GST_VORBIS_DEC (gst_pad_get_parent (pad));

  /* rate/channels/width are only known after the header packets */
  if (!dec->initialized)
    goto no_header;

  if (dec->sinkpad == pad &&
      (src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES))
    goto no_format;

  switch (src_format) {
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          /* bytes = samples * width * channels: set the factor, then fall
           * through to the sample computation below */
          scale = dec->width * dec->vi.channels;
          /* fallthrough */
        case GST_FORMAT_DEFAULT:
          *dest_value =
              scale * gst_util_uint64_scale_int (src_value, dec->vi.rate,
              GST_SECOND);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * dec->width * dec->vi.channels;
          break;
        case GST_FORMAT_TIME:
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND, dec->vi.rate);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          *dest_value = src_value / (dec->width * dec->vi.channels);
          break;
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
              dec->vi.rate * dec->width * dec->vi.channels);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }

done:
  gst_object_unref (dec);

  return res;

  /* ERRORS */
no_header:
  {
    GST_DEBUG_OBJECT (dec, "no header packets received");
    res = FALSE;
    goto done;
  }
no_format:
  {
    GST_DEBUG_OBJECT (dec, "formats unsupported");
    res = FALSE;
    goto done;
  }
}
/* Convert a value on the encoder sink pad between BYTES, DEFAULT (samples)
 * and TIME formats, based on the negotiated channel count and sample rate
 * (16-bit interleaved input assumed: bytes_per_sample = channels * 2).
 * Returns FALSE for unsupported conversions or when channels/frequency are
 * still 0.
 *
 * Fix vs. original: the early `return FALSE` paths skipped the
 * gst_object_unref() of the parent ref taken by gst_pad_get_parent(),
 * leaking a reference on every failed conversion.  All failure paths now
 * fall through to the common unref. */
static gboolean
gst_vorbis_enc_convert_sink (GstPad * pad, GstFormat src_format,
    gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  guint scale = 1;
  gint bytes_per_sample;
  GstVorbisEnc *vorbisenc;

  vorbisenc = GST_VORBISENC (gst_pad_get_parent (pad));

  /* 16-bit samples, interleaved over all channels */
  bytes_per_sample = vorbisenc->channels * 2;

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          if (bytes_per_sample == 0) {
            res = FALSE;        /* was: return FALSE (leaked parent ref) */
            break;
          }
          *dest_value = src_value / bytes_per_sample;
          break;
        case GST_FORMAT_TIME:
        {
          gint byterate = bytes_per_sample * vorbisenc->frequency;

          if (byterate == 0) {
            res = FALSE;        /* was: return FALSE (leaked parent ref) */
            break;
          }
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND, byterate);
          break;
        }
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * bytes_per_sample;
          break;
        case GST_FORMAT_TIME:
          if (vorbisenc->frequency == 0) {
            res = FALSE;        /* was: return FALSE (leaked parent ref) */
            break;
          }
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND,
              vorbisenc->frequency);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          /* bytes = samples * bytes_per_sample: set the factor and fall
           * through to the sample computation */
          scale = bytes_per_sample;
          /* fallthrough */
        case GST_FORMAT_DEFAULT:
          *dest_value =
              gst_util_uint64_scale_int (src_value,
              scale * vorbisenc->frequency, GST_SECOND);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }

  gst_object_unref (vorbisenc);

  return res;
}
/* Convert a value between TIME, DEFAULT (samples) and BYTES formats on the
 * parser pads, using the stream's rate/channels once the headers have been
 * seen (packetno >= 4).  BYTES is not supported on the sink pad.  Samples
 * are assumed to be sizeof(float) bytes per channel. */
static gboolean
vorbis_parse_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstVorbisParse *parse;
  guint64 scale = 1;

  parse = GST_VORBIS_PARSE (GST_PAD_PARENT (pad));

  /* fixme: assumes atomic access to lots of instance variables modified from
   * the streaming thread, including 64-bit variables */

  /* stream parameters are unknown until the three header packets (and the
   * first data packet) have passed through */
  if (parse->packetno < 4)
    return FALSE;

  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  if (parse->sinkpad == pad &&
      (src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES))
    return FALSE;

  switch (src_format) {
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          /* bytes = samples * sizeof(float) * channels: set the factor and
           * fall through to the sample computation below */
          scale = sizeof (float) * parse->vi.channels;
          /* fallthrough */
        case GST_FORMAT_DEFAULT:
          *dest_value =
              scale * gst_util_uint64_scale_int (src_value, parse->vi.rate,
              GST_SECOND);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * sizeof (float) * parse->vi.channels;
          break;
        case GST_FORMAT_TIME:
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND,
              parse->vi.rate);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          *dest_value = src_value / (sizeof (float) * parse->vi.channels);
          break;
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
              parse->vi.rate * sizeof (float) * parse->vi.channels);
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
  }

  return res;
}
/* In-place transform for the identity element (0.10 variant): passes @buf
 * through unchanged while performing identity's side-band duties —
 * perfect-stream checks, last-message statistics, optional random drops /
 * induced errors, handoff signalling and clock synchronisation. */
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);

  /* optional continuity checks against the previous buffer's values */
  if (identity->check_perfect)
    gst_identity_check_perfect (identity, buf);
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  /* error-after counts down once per buffer and errors out exactly when
   * it reaches zero */
  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0) {
      GST_ELEMENT_ERROR (identity, CORE, FAILED,
          (_("Failed after iterations as requested.")), (NULL));
      return GST_FLOW_ERROR;
    }
  }

  /* randomly drop buffers with the configured probability */
  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability) {
      if (!identity->silent) {
        GST_OBJECT_LOCK (identity);
        g_free (identity->last_message);
        identity->last_message =
            g_strdup_printf ("dropping ******* (%s:%s)i (%d bytes, timestamp: %"
            GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
            G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
            ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
            GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
            GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
            GST_BUFFER_FLAGS (buf), buf);
        GST_OBJECT_UNLOCK (identity);
        gst_identity_notify_last_message (identity);
      }
      /* return DROPPED to basetransform. */
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }
  }

  /* hex-dump the buffer contents when requested */
  if (identity->dump) {
    gst_util_dump_mem (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  }

  if (!identity->silent) {
    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);
    identity->last_message =
        g_strdup_printf ("chain ******* (%s:%s)i (%d bytes, timestamp: %"
        GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
        G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
        ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
        GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_BUFFER_FLAGS (buf), buf);
    GST_OBJECT_UNLOCK (identity);
    gst_identity_notify_last_message (identity);
  }

  /* re-timestamp according to the configured byte rate */
  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_TIMESTAMP (buf) = time;
    GST_BUFFER_DURATION (buf) =
        GST_BUFFER_SIZE (buf) * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  /* sync to the clock at the buffer's running time; the clock id is stored
   * so another thread could unschedule the wait */
  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      /* FIXME: actually unlock this somewhere in the state changes */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
      GST_OBJECT_UNLOCK (identity);

      cret = gst_clock_id_wait (identity->clock_id, NULL);

      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      /* an unscheduled wait means we were woken up for shutdown */
      if (cret == GST_CLOCK_UNSCHEDULED)
        ret = GST_FLOW_UNEXPECTED;
    }
    GST_OBJECT_UNLOCK (identity);
  }

  identity->offset += GST_BUFFER_SIZE (buf);

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  /* in single-segment mode, rewrite timestamps as running time */
  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_TIMESTAMP (buf) = runtimestamp;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;
}
static void gst_decklink_src_task (void *priv) { GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv); GstBuffer *buffer; GstBuffer *audio_buffer; IDeckLinkVideoInputFrame *video_frame; IDeckLinkAudioInputPacket *audio_frame; void *data; int n_samples; GstFlowReturn ret; const GstDecklinkMode *mode; GST_DEBUG_OBJECT (decklinksrc, "task"); g_mutex_lock (decklinksrc->mutex); while (decklinksrc->video_frame == NULL && !decklinksrc->stop) { g_cond_wait (decklinksrc->cond, decklinksrc->mutex); } video_frame = decklinksrc->video_frame; audio_frame = decklinksrc->audio_frame; decklinksrc->video_frame = NULL; decklinksrc->audio_frame = NULL; g_mutex_unlock (decklinksrc->mutex); if (decklinksrc->stop) { GST_DEBUG ("stopping task"); return; } /* warning on dropped frames */ if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) { GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ, ("Dropped %d frame(s), for a total of %d frame(s)", decklinksrc->dropped_frames - decklinksrc->dropped_frames_old, decklinksrc->dropped_frames), (NULL)); decklinksrc->dropped_frames_old = decklinksrc->dropped_frames; } mode = gst_decklink_get_mode (decklinksrc->mode); video_frame->GetBytes (&data); if (decklinksrc->copy_data) { buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2); memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2); video_frame->Release (); } else { buffer = gst_buffer_new (); GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2; GST_BUFFER_DATA (buffer) = (guint8 *) data; GST_BUFFER_FREE_FUNC (buffer) = video_frame_free; GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame; } GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND, mode->fps_d, mode->fps_n); GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND, mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer); GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num; 
GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num; if (decklinksrc->frame_num == 0) { GstEvent *event; gboolean ret; GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); if (gst_pad_is_linked (decklinksrc->videosrcpad)) { gst_event_ref (event); ret = gst_pad_push_event (decklinksrc->videosrcpad, event); if (!ret) { GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret); gst_event_unref (event); return; } } else { gst_event_unref (event); } if (gst_pad_is_linked (decklinksrc->audiosrcpad)) { ret = gst_pad_push_event (decklinksrc->audiosrcpad, event); if (!ret) { GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret); gst_event_unref (event); } } else { gst_event_unref (event); } } if (decklinksrc->video_caps == NULL) { decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode); } gst_buffer_set_caps (buffer, decklinksrc->video_caps); ret = gst_pad_push (decklinksrc->videosrcpad, buffer); if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED || ret == GST_FLOW_WRONG_STATE)) { GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (ret))); } if (gst_pad_is_linked (decklinksrc->audiosrcpad)) { n_samples = audio_frame->GetSampleFrameCount (); audio_frame->GetBytes (&data); audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2); memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2); GST_BUFFER_TIMESTAMP (audio_buffer) = gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND, 1, 48000); GST_BUFFER_DURATION (audio_buffer) = gst_util_uint64_scale_int (n_samples * GST_SECOND, 1, 48000); decklinksrc->num_audio_samples += n_samples; if (decklinksrc->audio_caps == NULL) { decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int", "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, "signed", G_TYPE_BOOLEAN, TRUE, "depth", G_TYPE_INT, 16, "width", 
G_TYPE_INT, 16, "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL); } gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps); ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer); if (!(ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED || ret == GST_FLOW_WRONG_STATE)) { GST_ELEMENT_ERROR (decklinksrc, STREAM, FAILED, ("Internal data stream error."), ("stream stopped, reason %s", gst_flow_get_name (ret))); } } audio_frame->Release (); }
/* Capture one frame of the Direct3D9 front buffer into a new GstBuffer.
 * When a framerate is configured, waits on the pipeline clock for the
 * next frame slot; the buffer duration is either a full 1/fps frame (when
 * we waited) or the span until the next expected capture time.
 *
 * Fix vs. original: the GstClock reference from gst_element_get_clock()
 * was only released at the very end, so the GST_FLOW_FLUSHING return and
 * both GST_FLOW_ERROR returns leaked it.  The clock is now released as
 * soon as the timing work is finished. */
static GstFlowReturn
gst_dx9screencapsrc_create (GstPushSrc * push_src, GstBuffer ** buf)
{
  GstDX9ScreenCapSrc *src = GST_DX9SCREENCAPSRC (push_src);
  GstBuffer *new_buf;
  gint new_buf_size, i;
  gint width, height, stride;
  GstClock *clock;
  GstClockTime buf_time, buf_dur;
  D3DLOCKED_RECT locked_rect;
  LPBYTE p_dst, p_src;
  HRESULT hres;
  GstMapInfo map;
  guint64 frame_number;

  if (G_UNLIKELY (!src->d3d9_device)) {
    GST_ELEMENT_ERROR (src, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before create function"));
    return GST_FLOW_NOT_NEGOTIATED;
  }

  clock = gst_element_get_clock (GST_ELEMENT (src));
  if (clock != NULL) {
    GstClockTime time, base_time;

    /* Calculate sync time. */
    time = gst_clock_get_time (clock);
    base_time = gst_element_get_base_time (GST_ELEMENT (src));
    buf_time = time - base_time;

    if (src->rate_numerator) {
      frame_number = gst_util_uint64_scale (buf_time,
          src->rate_numerator, GST_SECOND * src->rate_denominator);
    } else {
      frame_number = -1;
    }
  } else {
    buf_time = GST_CLOCK_TIME_NONE;
    frame_number = -1;
  }

  if (frame_number != -1 && frame_number == src->frame_number) {
    GstClockID id;
    GstClockReturn ret;

    /* Need to wait for the next frame */
    frame_number += 1;

    /* Figure out what the next frame time is */
    buf_time = gst_util_uint64_scale (frame_number,
        src->rate_denominator * GST_SECOND, src->rate_numerator);

    id = gst_clock_new_single_shot_id (clock,
        buf_time + gst_element_get_base_time (GST_ELEMENT (src)));
    GST_OBJECT_LOCK (src);
    src->clock_id = id;
    GST_OBJECT_UNLOCK (src);

    GST_DEBUG_OBJECT (src, "Waiting for next frame time %" G_GUINT64_FORMAT,
        buf_time);
    ret = gst_clock_id_wait (id, NULL);
    GST_OBJECT_LOCK (src);

    gst_clock_id_unref (id);
    src->clock_id = NULL;
    if (ret == GST_CLOCK_UNSCHEDULED) {
      /* Got woken up by the unlock function */
      GST_OBJECT_UNLOCK (src);
      /* the original leaked the clock ref on this path */
      gst_object_unref (clock);
      return GST_FLOW_FLUSHING;
    }
    GST_OBJECT_UNLOCK (src);

    /* Duration is a complete 1/fps frame duration */
    buf_dur =
        gst_util_uint64_scale_int (GST_SECOND, src->rate_denominator,
        src->rate_numerator);
  } else if (frame_number != -1) {
    GstClockTime next_buf_time;

    GST_DEBUG_OBJECT (src, "No need to wait for next frame time %"
        G_GUINT64_FORMAT " next frame = %" G_GINT64_FORMAT " prev = %"
        G_GINT64_FORMAT, buf_time, frame_number, src->frame_number);
    next_buf_time = gst_util_uint64_scale (frame_number + 1,
        src->rate_denominator * GST_SECOND, src->rate_numerator);
    /* Frame duration is from now until the next expected capture time */
    buf_dur = next_buf_time - buf_time;
  } else {
    buf_dur = GST_CLOCK_TIME_NONE;
  }
  src->frame_number = frame_number;

  /* done with the clock: release it here so the error returns below
   * cannot leak the reference */
  if (clock != NULL)
    gst_object_unref (clock);

  height = (src->src_rect.bottom - src->src_rect.top);
  width = (src->src_rect.right - src->src_rect.left);
  new_buf_size = width * 4 * height;

  GST_LOG_OBJECT (src,
      "creating buffer of %d bytes with %dx%d image",
      new_buf_size, width, height);

  /* Do screen capture and put it into buffer...
   * Aquire front buffer, and lock it */
  hres =
      IDirect3DDevice9_GetFrontBufferData (src->d3d9_device, 0, src->surface);
  if (FAILED (hres)) {
    GST_DEBUG_OBJECT (src, "DirectX::GetBackBuffer failed.");
    return GST_FLOW_ERROR;
  }

  if (src->show_cursor) {
    CURSORINFO ci;

    ci.cbSize = sizeof (CURSORINFO);
    GetCursorInfo (&ci);
    if (ci.flags & CURSOR_SHOWING) {
      ICONINFO ii;
      HDC memDC;

      GetIconInfo (ci.hCursor, &ii);

      if (SUCCEEDED (IDirect3DSurface9_GetDC (src->surface, &memDC))) {
        /* paint a monochrome copy of the cursor into the captured frame,
         * offset into the monitor's coordinate space */
        HCURSOR cursor = CopyImage (ci.hCursor, IMAGE_CURSOR, 0, 0,
            LR_MONOCHROME | LR_DEFAULTSIZE);

        DrawIcon (memDC,
            ci.ptScreenPos.x - ii.xHotspot - src->monitor_info.rcMonitor.left,
            ci.ptScreenPos.y - ii.yHotspot - src->monitor_info.rcMonitor.top,
            cursor);
        DestroyCursor (cursor);
        IDirect3DSurface9_ReleaseDC (src->surface, memDC);
      }

      DeleteObject (ii.hbmColor);
      DeleteObject (ii.hbmMask);
    }
  }

  hres = IDirect3DSurface9_LockRect (src->surface, &locked_rect,
      &(src->src_rect),
      D3DLOCK_NO_DIRTY_UPDATE | D3DLOCK_NOSYSLOCK | D3DLOCK_READONLY);
  if (FAILED (hres)) {
    GST_DEBUG_OBJECT (src, "DirectX::LockRect failed.");
    return GST_FLOW_ERROR;
  }

  new_buf = gst_buffer_new_and_alloc (new_buf_size);
  gst_buffer_map (new_buf, &map, GST_MAP_WRITE);
  p_dst = (LPBYTE) map.data;
  p_src = (LPBYTE) locked_rect.pBits;
  stride = width * 4;
  /* copy row by row: the surface pitch can be larger than the row size */
  for (i = 0; i < height; ++i) {
    memcpy (p_dst, p_src, stride);
    p_dst += stride;
    p_src += locked_rect.Pitch;
  }
  gst_buffer_unmap (new_buf, &map);

  /* Unlock copy of front buffer */
  IDirect3DSurface9_UnlockRect (src->surface);

  GST_BUFFER_TIMESTAMP (new_buf) = buf_time;
  GST_BUFFER_DURATION (new_buf) = buf_dur;

  *buf = new_buf;
  return GST_FLOW_OK;
}
/* In-place transform for the identity element (DTS/PTS-aware variant):
 * passes @buf through unchanged while performing identity's side-band
 * duties — continuity checks, statistics message, optional random drops /
 * induced errors, handoff signalling and clock synchronisation via
 * gst_identity_do_sync().  Dropped buffers are replaced by a GAP event. */
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime rundts = GST_CLOCK_TIME_NONE;
  GstClockTime runpts = GST_CLOCK_TIME_NONE;
  GstClockTime ts, duration, runtimestamp;
  gsize size;

  size = gst_buffer_get_size (buf);

  /* optional continuity checks against the previous buffer's values */
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  /* error-after counts down once per buffer and errors out exactly when
   * it reaches zero */
  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0)
      goto error_after;
  }

  /* randomly drop buffers with the configured probability */
  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability)
      goto dropped;
  }

  /* hex-dump the buffer contents when requested */
  if (identity->dump) {
    GstMapInfo info;

    gst_buffer_map (buf, &info, GST_MAP_READ);
    gst_util_dump_mem (info.data, info.size);
    gst_buffer_unmap (buf, &info);
  }

  if (!identity->silent) {
    gst_identity_update_last_message_for_buffer (identity, "chain", buf, size);
  }

  /* re-timestamp according to the configured byte rate */
  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time;
    GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME) {
    rundts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_DTS (buf));
    runpts = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_PTS (buf));
  }

  /* sync on DTS when valid, PTS otherwise, 0 as a last resort */
  if (GST_CLOCK_TIME_IS_VALID (rundts))
    runtimestamp = rundts;
  else if (GST_CLOCK_TIME_IS_VALID (runpts))
    runtimestamp = runpts;
  else
    runtimestamp = 0;
  ret = gst_identity_do_sync (identity, runtimestamp);

  identity->offset += size;

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  /* in single-segment mode, rewrite timestamps as running time */
  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_DTS (buf) = rundts;
    GST_BUFFER_PTS (buf) = runpts;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;

  /* ERRORS */
error_after:
  {
    GST_ELEMENT_ERROR (identity, CORE, FAILED,
        (_("Failed after iterations as requested.")), (NULL));
    return GST_FLOW_ERROR;
  }
dropped:
  {
    if (!identity->silent) {
      gst_identity_update_last_message_for_buffer (identity, "dropping", buf,
          size);
    }

    /* emit a GAP event covering the dropped buffer so downstream does not
     * wait for the missing data */
    ts = GST_BUFFER_TIMESTAMP (buf);
    if (GST_CLOCK_TIME_IS_VALID (ts)) {
      duration = GST_BUFFER_DURATION (buf);
      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (identity),
          gst_event_new_gap (ts, duration));
    }

    /* return DROPPED to basetransform. */
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
/* Sink pad chain function: queues the incoming video buffer, then drains
 * every picture that is ready from the postprocess queue, runs it through
 * the VDPAU video mixer (with past/future reference surfaces for
 * deinterlacing) and pushes the rendered output buffer.
 *
 * Fix vs. original: the first argument to gst_vdp_vpp_get_next_picture()
 * had been charset-mangled to "¤t_pic"; restored to "&current_pic"
 * (the "&curr" prefix had collapsed into the ¤ mojibake). */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, }
    , dest_r = {
      0,};
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret =
        gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    /* compensate for a non-square pixel aspect ratio by narrowing or
     * widening the source rectangle around its centre */
    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status = device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE,
        NULL, current_pic.structure, video_surfaces_past_count,
        video_surfaces_past, current_pic.buf->surface,
        video_surfaces_future_count, video_surfaces_future, NULL,
        outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    /* propagate the flags of the source picture */
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);
    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);

  return ret;
}
/* GstCollectPads callback: takes one equal-sized chunk from every sink pad
 * and interleaves them channel-by-channel into a single output buffer.
 *
 * Pads that delivered no data or carried the GAP flag leave their channel
 * slots zeroed (the output was memset to 0 up front).  The first output
 * buffer is preceded by a pending newsegment event; timestamps/offsets are
 * derived from the running sample count.
 *
 * Returns: the flow result of the allocation/push, GST_FLOW_NOT_NEGOTIATED
 * when no usable output buffer could be obtained, GST_FLOW_UNEXPECTED (EOS)
 * when no pad delivered data.
 *
 * FIX: the original called gst_buffer_unref (outbuf) even when outbuf was
 * NULL (gst_pad_alloc_buffer may succeed-with-NULL paths aside, the check
 * "outbuf == NULL || size-too-small" unref'd NULL); the NULL case is now
 * handled before any unref.
 */
static GstFlowReturn
gst_interleave_collected (GstCollectPads * pads, GstInterleave * self)
{
  guint size;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  GSList *collected;
  guint nsamples;
  guint ncollected = 0;
  gboolean empty = TRUE;
  gint width = self->width / 8;   /* bytes per single-channel sample */

  g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (self->rate > 0, GST_FLOW_NOT_NEGOTIATED);

  size = gst_collect_pads_available (pads);

  g_return_val_if_fail (size % width == 0, GST_FLOW_ERROR);

  GST_DEBUG_OBJECT (self, "Starting to collect %u bytes from %d channels",
      size, self->channels);

  nsamples = size / width;

  ret = gst_pad_alloc_buffer (self->src, GST_BUFFER_OFFSET_NONE,
      size * self->channels, GST_PAD_CAPS (self->src), &outbuf);

  if (ret != GST_FLOW_OK) {
    return ret;
  } else if (outbuf == NULL) {
    /* No buffer despite GST_FLOW_OK: nothing to unref, just bail. */
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (GST_BUFFER_SIZE (outbuf) < size * self->channels) {
    gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  } else if (!gst_caps_is_equal (GST_BUFFER_CAPS (outbuf),
          GST_PAD_CAPS (self->src))) {
    gst_buffer_unref (outbuf);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  /* Zero the whole output so missing/GAP channels come out silent. */
  memset (GST_BUFFER_DATA (outbuf), 0, size * self->channels);

  for (collected = pads->data; collected != NULL; collected = collected->next) {
    GstCollectData *cdata;
    GstBuffer *inbuf;
    guint8 *outdata;

    cdata = (GstCollectData *) collected->data;

    inbuf = gst_collect_pads_take_buffer (pads, cdata, size);
    if (inbuf == NULL) {
      GST_DEBUG_OBJECT (cdata->pad, "No buffer available");
      goto next;
    }
    ncollected++;

    if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))
      goto next;

    empty = FALSE;
    /* Write this pad's samples into its channel slot; self->func does the
     * per-sample interleaving at the configured width. */
    outdata =
        GST_BUFFER_DATA (outbuf) +
        width * GST_INTERLEAVE_PAD_CAST (cdata->pad)->channel;

    self->func (outdata, GST_BUFFER_DATA (inbuf), self->channels, nsamples);

  next:
    if (inbuf)
      gst_buffer_unref (inbuf);
  }

  if (ncollected == 0)
    goto eos;

  /* Send the deferred newsegment before the first buffer. */
  if (self->segment_pending) {
    GstEvent *event;

    event = gst_event_new_new_segment_full (FALSE, self->segment_rate, 1.0,
        GST_FORMAT_TIME, self->timestamp, -1, self->segment_position);

    gst_pad_push_event (self->src, event);
    self->segment_pending = FALSE;
    self->segment_position = 0;
  }

  GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp;
  GST_BUFFER_OFFSET (outbuf) = self->offset;

  /* Advance the running sample count and derive the next timestamp from
   * it; duration is the delta so rounding never accumulates. */
  self->offset += nsamples;
  self->timestamp = gst_util_uint64_scale_int (self->offset,
      GST_SECOND, self->rate);
  GST_BUFFER_DURATION (outbuf) = self->timestamp -
      GST_BUFFER_TIMESTAMP (outbuf);

  if (empty)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

  GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
  ret = gst_pad_push (self->src, outbuf);

  return ret;

eos:
  {
    GST_DEBUG_OBJECT (self, "no data available, must be EOS");
    gst_buffer_unref (outbuf);
    gst_pad_push_event (self->src, gst_event_new_eos ());
    return GST_FLOW_UNEXPECTED;
  }
}
/* Sink-pad chain function for the OMX base filter.
 *
 * Lazily brings the OMX component out of Loaded state on the first buffer
 * (setup, prepare, start the src-pad output task), sends any codec data,
 * then feeds the GstBuffer to the OMX input port — either zero-copy by
 * handing the data pointer over (share_input_buffer) or by memcpy'ing it
 * chunk-by-chunk into however many OMX buffers it takes.
 *
 * Ownership: in the share_input_buffer path the GstBuffer is stored in
 * omx_buffer->pAppPrivate and unref'd later when that OMX buffer cycles
 * back; otherwise it is unref'd here before returning.
 *
 * Returns GST_FLOW_OK normally, GST_FLOW_UNEXPECTED if the input port is
 * disabled, or the last downstream flow return when flushing.
 */
static GstFlowReturn
pad_chain (GstPad *pad, GstBuffer *buf)
{
  GOmxCore *gomx;
  GOmxPort *in_port;
  GstOmxBaseFilter *self;
  GstFlowReturn ret = GST_FLOW_OK;

  self = GST_OMX_BASE_FILTER (GST_OBJECT_PARENT (pad));

  gomx = self->gomx;

  GST_LOG_OBJECT (self, "begin");
  /* NOTE(review): GST_BUFFER_SIZE yields a guint; "%lu" assumes it (and the
   * OMX_U32 fields logged below) widen to unsigned long — confirm against
   * the platform's OMX headers. */
  GST_LOG_OBJECT (self, "gst_buffer: size=%lu", GST_BUFFER_SIZE (buf));

  GST_LOG_OBJECT (self, "state: %d", gomx->omx_state);

  /* First buffer: component still Loaded -> run one-time initialization
   * and kick off the output loop task on the source pad. */
  if (G_UNLIKELY (gomx->omx_state == OMX_StateLoaded)) {
    GST_INFO_OBJECT (self, "omx: prepare");

    /** @todo this should probably go after doing preparations. */
    if (self->omx_setup) {
      self->omx_setup (self);
    }

    setup_ports (self);
    g_omx_core_prepare (self->gomx);

    self->initialized = TRUE;

    gst_pad_start_task (self->srcpad, output_loop, self->srcpad);
  }

  in_port = self->in_port;

  if (G_LIKELY (in_port->enabled)) {
    guint buffer_offset = 0;   /* bytes of buf already handed to OMX */

    if (G_UNLIKELY (gomx->omx_state == OMX_StateIdle)) {
      GST_INFO_OBJECT (self, "omx: play");
      g_omx_core_start (gomx);

      /* send buffer with codec data flag */
      /** @todo move to util */
      if (self->codec_data) {
        OMX_BUFFERHEADERTYPE *omx_buffer;

        GST_LOG_OBJECT (self, "request buffer");
        omx_buffer = g_omx_port_request_buffer (in_port);

        if (G_LIKELY (omx_buffer)) {
          omx_buffer->nFlags |= 0x00000080; /* codec data flag */

          omx_buffer->nFilledLen = GST_BUFFER_SIZE (self->codec_data);
          memcpy (omx_buffer->pBuffer + omx_buffer->nOffset,
              GST_BUFFER_DATA (self->codec_data), omx_buffer->nFilledLen);

          GST_LOG_OBJECT (self, "release_buffer");
          g_omx_port_release_buffer (in_port, omx_buffer);
        }
      }
    }

    if (G_UNLIKELY (gomx->omx_state != OMX_StateExecuting)) {
      GST_ERROR_OBJECT (self, "Whoa! very wrong");
    }

    /* Feed the whole GstBuffer to the port, one OMX buffer at a time. */
    while (G_LIKELY (buffer_offset < GST_BUFFER_SIZE (buf))) {
      OMX_BUFFERHEADERTYPE *omx_buffer;

      /* Downstream already failed/flushed: stop feeding input. */
      if (self->last_pad_push_return != GST_FLOW_OK) {
        goto out_flushing;
      }

      GST_LOG_OBJECT (self, "request buffer");
      omx_buffer = g_omx_port_request_buffer (in_port);

      GST_LOG_OBJECT (self, "omx_buffer: %p", omx_buffer);

      if (G_LIKELY (omx_buffer)) {
        GST_DEBUG_OBJECT (self,
            "omx_buffer: size=%lu, len=%lu, flags=%lu, offset=%lu, timestamp=%lld",
            omx_buffer->nAllocLen, omx_buffer->nFilledLen, omx_buffer->nFlags,
            omx_buffer->nOffset, omx_buffer->nTimeStamp);

        if (omx_buffer->nOffset == 0 && self->share_input_buffer) {
          /* Zero-copy path: free/unref whatever payload the OMX buffer
           * previously carried, then point it at buf's data.  buf itself
           * is kept alive via pAppPrivate until OMX is done with it. */
          {
            GstBuffer *old_buf;
            old_buf = omx_buffer->pAppPrivate;

            if (old_buf) {
              gst_buffer_unref (old_buf);
            } else if (omx_buffer->pBuffer) {
              g_free (omx_buffer->pBuffer);
            }
          }

          omx_buffer->pBuffer = GST_BUFFER_DATA (buf);
          omx_buffer->nAllocLen = GST_BUFFER_SIZE (buf);
          omx_buffer->nFilledLen = GST_BUFFER_SIZE (buf);
          omx_buffer->pAppPrivate = buf;
        } else {
          /* Copy path: fill as much as fits in this OMX buffer. */
          omx_buffer->nFilledLen = MIN (GST_BUFFER_SIZE (buf) - buffer_offset,
              omx_buffer->nAllocLen - omx_buffer->nOffset);
          memcpy (omx_buffer->pBuffer + omx_buffer->nOffset,
              GST_BUFFER_DATA (buf) + buffer_offset, omx_buffer->nFilledLen);
        }

        if (self->use_timestamps) {
          /* Convert GStreamer nanoseconds to OMX ticks. */
          omx_buffer->nTimeStamp = gst_util_uint64_scale_int (
              GST_BUFFER_TIMESTAMP (buf), OMX_TICKS_PER_SECOND, GST_SECOND);
        }

        buffer_offset += omx_buffer->nFilledLen;

        GST_LOG_OBJECT (self, "release_buffer");
        /** @todo untaint buffer */
        g_omx_port_release_buffer (in_port, omx_buffer);
      } else {
        GST_WARNING_OBJECT (self, "null buffer");
        goto out_flushing;
      }
    }
  } else {
    GST_WARNING_OBJECT (self, "done");
    ret = GST_FLOW_UNEXPECTED;
  }

  /* In the shared path buf's ref was transferred to pAppPrivate above. */
  if (!self->share_input_buffer) {
    gst_buffer_unref (buf);
  }

  GST_LOG_OBJECT (self, "end");

  return ret;

  /* special conditions */
out_flushing:
  {
    gst_buffer_unref (buf);
    return self->last_pad_push_return;
  }
}
/* Streaming task for the Musepack decoder sink pad.
 *
 * One iteration: lazily initialize the stream on the first call (caps,
 * newsegment), allocate an output buffer, decode one frame into it via
 * either the old mpc_decoder API or the new mpc_demux API, stamp it from
 * the running segment position, and push it downstream.  On a configured
 * segment boundary it either posts SEGMENT_DONE (segment seek) or emits
 * EOS; any error path pauses the task.
 */
static void
gst_musepackdec_loop (GstPad * sinkpad)
{
  GstMusepackDec *musepackdec;
  GstFlowReturn flow;
  GstBuffer *out;

#ifdef MPC_IS_OLD_API
  guint32 update_acc, update_bits;
#else
  mpc_frame_info frame;
  mpc_status err;
#endif
  gint num_samples, samplerate, bitspersample;

  musepackdec = GST_MUSEPACK_DEC (GST_PAD_PARENT (sinkpad));

  samplerate = g_atomic_int_get (&musepackdec->rate);

  /* rate == 0 means the stream header hasn't been parsed yet. */
  if (samplerate == 0) {
    if (!gst_musepack_stream_init (musepackdec))
      goto pause_task;

    gst_musepackdec_send_newsegment (musepackdec);
    samplerate = g_atomic_int_get (&musepackdec->rate);
  }

  bitspersample = g_atomic_int_get (&musepackdec->bps);

  flow = gst_pad_alloc_buffer_and_set_caps (musepackdec->srcpad, -1,
      MPC_DECODER_BUFFER_LENGTH * 4, GST_PAD_CAPS (musepackdec->srcpad), &out);

  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

#ifdef MPC_IS_OLD_API
  /* Old API: negative = error, 0 = end of stream. */
  num_samples = mpc_decoder_decode (musepackdec->d,
      (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out), &update_acc, &update_bits);

  if (num_samples < 0) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (num_samples == 0) {
    goto eos_and_pause;
  }
#else
  /* New API: decode in place; frame.bits == -1 signals end of stream. */
  frame.buffer = (MPC_SAMPLE_FORMAT *) GST_BUFFER_DATA (out);
  err = mpc_demux_decode (musepackdec->d, &frame);

  if (err != MPC_STATUS_OK) {
    GST_ERROR_OBJECT (musepackdec, "Failed to decode sample");
    GST_ELEMENT_ERROR (musepackdec, STREAM, DECODE, (NULL), (NULL));
    goto pause_task;
  } else if (frame.bits == -1) {
    goto eos_and_pause;
  }

  num_samples = frame.samples;
#endif

  /* NOTE(review): for samples*bitspersample to be a byte count, 'bps' must
   * already encode bytes-per-sample times channel count — confirm against
   * gst_musepack_stream_init, which sets it. */
  GST_BUFFER_SIZE (out) = num_samples * bitspersample;

  /* Timestamp from the running segment position (in samples), then
   * advance it by the samples just decoded. */
  GST_BUFFER_OFFSET (out) = musepackdec->segment.last_stop;
  GST_BUFFER_TIMESTAMP (out) =
      gst_util_uint64_scale_int (musepackdec->segment.last_stop,
      GST_SECOND, samplerate);
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (num_samples, GST_SECOND, samplerate);

  musepackdec->segment.last_stop += num_samples;

  GST_LOG_OBJECT (musepackdec, "Pushing buffer, timestamp %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)));

  flow = gst_pad_push (musepackdec->srcpad, out);
  if (flow != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (musepackdec, "Flow: %s", gst_flow_get_name (flow));
    goto pause_task;
  }

  /* check if we're at the end of a configured segment */
  if (musepackdec->segment.stop != -1 &&
      musepackdec->segment.last_stop >= musepackdec->segment.stop) {
    gint64 stop_time;

    GST_DEBUG_OBJECT (musepackdec, "Reached end of configured segment");

    /* Plain seek: finish with EOS; segment seek: post SEGMENT_DONE so the
     * application can schedule the next segment. */
    if ((musepackdec->segment.flags & GST_SEEK_FLAG_SEGMENT) == 0)
      goto eos_and_pause;

    GST_DEBUG_OBJECT (musepackdec, "Posting SEGMENT_DONE message");

    stop_time = gst_util_uint64_scale_int (musepackdec->segment.stop,
        GST_SECOND, samplerate);

    gst_element_post_message (GST_ELEMENT (musepackdec),
        gst_message_new_segment_done (GST_OBJECT (musepackdec),
            GST_FORMAT_TIME, stop_time));

    goto pause_task;
  }

  return;

eos_and_pause:
  {
    GST_DEBUG_OBJECT (musepackdec, "sending EOS event");
    gst_pad_push_event (musepackdec->srcpad, gst_event_new_eos ());
    /* fall through to pause */
  }

pause_task:
  {
    GST_DEBUG_OBJECT (musepackdec, "Pausing task");
    gst_pad_pause_task (sinkpad);
    return;
  }
}