/* Feed the two-pass statistics file back into the Theora encoder.
 *
 * Pulls bytes from enc->multipass_cache_fd in 512-byte chunks through
 * enc->multipass_cache_adapter and hands them to the encoder via
 * TH_ENCCTL_2PASS_IN until the encoder stops consuming (done) or the
 * file is exhausted.
 *
 * Returns FALSE (and posts a RESOURCE/READ error) on an I/O error, on
 * EOF with nothing read, or when the encoder rejects the data
 * (bytes_consumed < 0); TRUE otherwise.
 */
static gboolean
theora_enc_read_multipass_cache (GstTheoraEnc * enc)
{
  GstBuffer *cache_buf;
  const guint8 *cache_data;
  gsize bytes_read = 0;
  gssize bytes_consumed = 0;
  GIOStatus stat = G_IO_STATUS_NORMAL;
  gboolean done = FALSE;

  while (!done) {
    /* refill the adapter from the cache file when it runs dry */
    if (gst_adapter_available (enc->multipass_cache_adapter) == 0) {
      GstMapInfo minfo;

      cache_buf = gst_buffer_new_allocate (NULL, 512, NULL);

      gst_buffer_map (cache_buf, &minfo, GST_MAP_WRITE);

      stat = g_io_channel_read_chars (enc->multipass_cache_fd,
          (gchar *) minfo.data, minfo.size, &bytes_read, NULL);

      /* gsize is unsigned, so this is effectively bytes_read == 0:
       * nothing more to read (EOF or error, distinguished below via stat) */
      if (bytes_read <= 0) {
        gst_buffer_unmap (cache_buf, &minfo);
        gst_buffer_unref (cache_buf);
        break;
      } else {
        gst_buffer_unmap (cache_buf, &minfo);
        gst_buffer_resize (cache_buf, 0, bytes_read);

        /* adapter takes ownership of cache_buf */
        gst_adapter_push (enc->multipass_cache_adapter, cache_buf);
      }
    }
    if (gst_adapter_available (enc->multipass_cache_adapter) == 0)
      break;

    bytes_read =
        MIN (gst_adapter_available (enc->multipass_cache_adapter), 512);

    cache_data = gst_adapter_map (enc->multipass_cache_adapter, bytes_read);

    /* hand the stats chunk to the encoder; it reports how much it took */
    bytes_consumed =
        th_encode_ctl (enc->encoder, TH_ENCCTL_2PASS_IN, (guint8 *) cache_data,
        bytes_read);
    gst_adapter_unmap (enc->multipass_cache_adapter);

    /* encoder consuming nothing (or erroring) ends the loop */
    done = bytes_consumed <= 0;
    if (bytes_consumed > 0)
      gst_adapter_flush (enc->multipass_cache_adapter, bytes_consumed);
  }

  if (stat == G_IO_STATUS_ERROR || (stat == G_IO_STATUS_EOF && bytes_read == 0)
      || bytes_consumed < 0) {
    GST_ELEMENT_ERROR (enc, RESOURCE, READ, (NULL),
        ("Failed to read multipass cache file"));
    return FALSE;
  }
  return TRUE;
}
/* GstAudioEncoder::handle_frame for the AMR-NB encoder.
 *
 * Expects at least 320 bytes of input (160 16-bit samples, matching the
 * 160 samples reported to finish_frame below); shorter input is treated
 * as trailing data and discarded.  Encodes one frame into a buffer of at
 * most 32 bytes.
 */
static GstFlowReturn
gst_amrnbenc_handle_frame (GstAudioEncoder * enc, GstBuffer * buffer)
{
  GstAmrnbEnc *amrnbenc;
  GstFlowReturn ret;
  GstBuffer *out;
  GstMapInfo in_map, out_map;
  gsize out_size;

  amrnbenc = GST_AMRNBENC (enc);

  g_return_val_if_fail (amrnbenc->handle, GST_FLOW_FLUSHING);

  /* we don't deal with squeezing remnants, so simply discard those */
  if (G_UNLIKELY (buffer == NULL)) {
    GST_DEBUG_OBJECT (amrnbenc, "no data");
    return GST_FLOW_OK;
  }

  gst_buffer_map (buffer, &in_map, GST_MAP_READ);

  if (G_UNLIKELY (in_map.size < 320)) {
    /* in_map.size is still valid after unmap: unmap does not clear
     * the GstMapInfo struct, only releases the mapping */
    gst_buffer_unmap (buffer, &in_map);
    GST_DEBUG_OBJECT (amrnbenc, "discarding trailing data of %" G_GSIZE_FORMAT
        " bytes", in_map.size);
    return gst_audio_encoder_finish_frame (enc, NULL, -1);
  }

  /* get output, max size is 32 */
  out = gst_buffer_new_and_alloc (32);

  /* AMR encoder actually writes into the source data buffers it gets */
  /* should be able to handle that with what we are given */

  gst_buffer_map (out, &out_map, GST_MAP_WRITE);

  /* encode */
  out_size = Encoder_Interface_Encode (amrnbenc->handle, amrnbenc->bandmode,
      (short *) in_map.data, out_map.data, 0);

  gst_buffer_unmap (out, &out_map);
  gst_buffer_resize (out, 0, out_size);

  gst_buffer_unmap (buffer, &in_map);

  GST_LOG_OBJECT (amrnbenc, "output data size %" G_GSIZE_FORMAT, out_size);

  if (out_size) {
    ret = gst_audio_encoder_finish_frame (enc, out, 160);
  } else {
    /* should not happen (without dtx or so at least) */
    GST_WARNING_OBJECT (amrnbenc, "no encoded data; discarding input");
    gst_buffer_unref (out);
    ret = gst_audio_encoder_finish_frame (enc, NULL, -1);
  }
  return ret;
}
/* Copy @src into @dest, stride-aware for raw video.
 *
 * For a known raw video format the copy goes through GstVideoFrame so
 * differing strides between the two buffers are handled; for unknown or
 * encoded formats a plain byte copy is done and @dest is resized to the
 * source size.
 *
 * Returns GST_FLOW_ERROR if either buffer cannot be mapped.
 */
static GstFlowReturn
gst_v4l2_buffer_pool_copy_buffer (GstV4l2BufferPool * pool, GstBuffer * dest,
    GstBuffer * src)
{
  const GstVideoFormatInfo *finfo = pool->caps_info.finfo;

  GST_LOG_OBJECT (pool, "copying buffer");

  if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN
          && finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
    GstVideoFrame src_frame, dest_frame;

    GST_DEBUG_OBJECT (pool, "copy video frame");

    /* we have raw video, use videoframe copy to get strides right */
    if (!gst_video_frame_map (&src_frame, &pool->caps_info, src, GST_MAP_READ))
      goto invalid_buffer;

    if (!gst_video_frame_map (&dest_frame, &pool->caps_info, dest,
            GST_MAP_WRITE)) {
      gst_video_frame_unmap (&src_frame);
      goto invalid_buffer;
    }

    gst_video_frame_copy (&dest_frame, &src_frame);

    gst_video_frame_unmap (&src_frame);
    gst_video_frame_unmap (&dest_frame);
  } else {
    GstMapInfo map;

    GST_DEBUG_OBJECT (pool, "copy raw bytes");

    if (!gst_buffer_map (src, &map, GST_MAP_READ))
      goto invalid_buffer;

    /* NOTE(review): dest's capacity is not checked against src's size
     * here — presumably the pool guarantees dest is large enough;
     * confirm against the caller */
    gst_buffer_fill (dest, 0, map.data, gst_buffer_get_size (src));

    gst_buffer_unmap (src, &map);

    gst_buffer_resize (dest, 0, gst_buffer_get_size (src));
  }

  GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, pool, "slow copy into buffer %p",
      dest);

  return GST_FLOW_OK;

invalid_buffer:
  {
    GST_ERROR_OBJECT (pool, "could not map buffer");
    return GST_FLOW_ERROR;
  }
}
/* Render up to 256 frames of audio from WildMIDI into a freshly
 * allocated, timestamped buffer and advance the output segment position.
 * Returns NULL when the synth produced no data, otherwise the buffer
 * after running it through the clip helper. */
static GstBuffer *
gst_wildmidi_get_buffer (GstWildmidi * wildmidi)
{
  GstBuffer *out;
  GstMapInfo map;
  GstSegment *seg;
  size_t n_bytes;
  gint64 n_frames;
  guint frame_bytes = wildmidi->bytes_per_frame;

  out = gst_buffer_new_and_alloc (256 * frame_bytes);

  /* render under the object lock; WildMidi state is shared */
  gst_buffer_map (out, &map, GST_MAP_READWRITE);
  GST_OBJECT_LOCK (wildmidi);
  n_bytes = WildMidi_GetOutput (wildmidi->song, (gpointer) map.data,
      (unsigned long int) map.size);
  GST_OBJECT_UNLOCK (wildmidi);
  gst_buffer_unmap (out, &map);

  /* nothing rendered */
  if (n_bytes == 0) {
    gst_buffer_unref (out);
    return NULL;
  }

  /* shrink to what was actually produced */
  gst_buffer_resize (out, 0, n_bytes);

  seg = wildmidi->o_segment;
  GST_BUFFER_OFFSET (out) = seg->position;
  GST_BUFFER_TIMESTAMP (out) =
      gst_util_uint64_scale_int (seg->position, GST_SECOND, WILDMIDI_RATE);

  n_frames = n_bytes / frame_bytes;
  seg->position += n_frames;

  GST_BUFFER_OFFSET_END (out) = seg->position;
  GST_BUFFER_DURATION (out) =
      gst_util_uint64_scale_int (seg->position, GST_SECOND, WILDMIDI_RATE)
      - GST_BUFFER_TIMESTAMP (out);

  GST_DEBUG_OBJECT (wildmidi, "buffer ts: %" GST_TIME_FORMAT ", "
      "duration: %" GST_TIME_FORMAT " (%" G_GINT64_FORMAT " samples)",
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (out)), n_frames);

  return gst_wildmidi_clip_buffer (wildmidi, out);
}
/* Internal method only. Tries to merge buffers at the head of the queue
 * to form a single larger buffer of size 'size'.
 *
 * Returns TRUE if it managed to merge anything.
 */
static gboolean
gst_adapter_try_to_merge_up (GstAdapter * adapter, gsize size)
{
  GstBuffer *cur, *head;
  GSList *g;
  gboolean ret = FALSE;
  gsize hsize;

  g = adapter->buflist;
  if (g == NULL)
    return FALSE;

  head = g->data;
  hsize = gst_buffer_get_size (head);

  /* Remove skipped part from the buffer (otherwise the buffer might grow indefinitely) */
  head = gst_buffer_make_writable (head);
  gst_buffer_resize (head, adapter->skip, hsize - adapter->skip);
  hsize -= adapter->skip;
  adapter->skip = 0;
  /* make_writable may have returned a new buffer; store it back */
  g->data = head;
  g = g_slist_next (g);

  while (g != NULL && hsize < size) {
    cur = g->data;
    /* Merge the head buffer and the next in line */
    GST_LOG_OBJECT (adapter,
        "Merging buffers of size %" G_GSIZE_FORMAT " & %" G_GSIZE_FORMAT
        " in search of target %" G_GSIZE_FORMAT, hsize,
        gst_buffer_get_size (cur), size);

    /* gst_buffer_append takes ownership of cur */
    head = gst_buffer_append (head, cur);
    hsize = gst_buffer_get_size (head);
    ret = TRUE;

    /* Delete the front list item, and store our new buffer in the 2nd list
     * item */
    adapter->buflist = g_slist_delete_link (adapter->buflist, adapter->buflist);
    g->data = head;

    /* invalidate scan position */
    adapter->scan_offset = 0;
    adapter->scan_entry = NULL;

    g = g_slist_next (g);
  }

  return ret;
}
/* Called when a fragment download finishes (or EOSes).
 *
 * Finalizes decryption, drops any data left on the stream adapter,
 * strips PKCS#7 padding from the pending decrypted buffer (when
 * encrypted), pushes the pending buffer downstream, and advances to the
 * next fragment on success.
 */
static GstFlowReturn
gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream)
{
  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
  GstFlowReturn ret = GST_FLOW_OK;

  if (hlsdemux->current_key)
    gst_hls_demux_decrypt_end (hlsdemux);

  /* ideally this should be empty, but this eos might have been
   * caused by an error on the source element */
  GST_DEBUG_OBJECT (demux, "Data still on the adapter when EOS was received"
      ": %" G_GSIZE_FORMAT, gst_adapter_available (stream->adapter));
  gst_adapter_clear (stream->adapter);

  if (stream->last_ret == GST_FLOW_OK) {
    if (hlsdemux->pending_buffer) {
      if (hlsdemux->current_key) {
        GstMapInfo info;
        gssize unpadded_size;

        /* Handle pkcs7 unpadding here */
        /* NOTE(review): the pad byte comes straight from the (decrypted)
         * stream and is not validated against info.size — a corrupt pad
         * byte larger than the buffer would make unpadded_size negative;
         * confirm gst_buffer_resize's handling of that */
        gst_buffer_map (hlsdemux->pending_buffer, &info, GST_MAP_READ);
        unpadded_size = info.size - info.data[info.size - 1];
        gst_buffer_unmap (hlsdemux->pending_buffer, &info);

        gst_buffer_resize (hlsdemux->pending_buffer, 0, unpadded_size);
      }

      ret = gst_hls_demux_handle_buffer (demux, stream,
          hlsdemux->pending_buffer, TRUE);
      /* ownership passed to handle_buffer */
      hlsdemux->pending_buffer = NULL;
    }
  } else {
    /* earlier flow error: discard the pending data */
    if (hlsdemux->pending_buffer)
      gst_buffer_unref (hlsdemux->pending_buffer);
    hlsdemux->pending_buffer = NULL;
  }

  if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED)
    return gst_adaptive_demux_stream_advance_fragment (demux, stream,
        stream->fragment.duration);
  return ret;
}
/* Clip @buffer against the output segment, in frame (DEFAULT) units.
 * Clipping is currently disabled: the buffer is returned untouched and
 * the code below the early return is intentionally unreachable. */
static GstBuffer *
gst_wildmidi_clip_buffer (GstWildmidi * wildmidi, GstBuffer * buffer)
{
  guint64 in_start, in_stop;
  guint64 out_start, out_stop;
  gint64 trim_front, trim_len;
  guint64 frame_bytes;

  /* clipping disabled for now */
  return buffer;

  in_start = GST_BUFFER_OFFSET (buffer);
  in_stop = GST_BUFFER_OFFSET_END (buffer);

  /* fully outside the segment: drop the buffer */
  if (!gst_segment_clip (wildmidi->o_segment, GST_FORMAT_DEFAULT,
          in_start, in_stop, &out_start, &out_stop)) {
    gst_buffer_unref (buffer);
    return NULL;
  }

  /* untouched by clipping: return as-is */
  if (in_start == out_start && in_stop == out_stop)
    return buffer;

  trim_front = out_start - in_start;
  trim_len = out_stop - out_start;
  frame_bytes = wildmidi->bytes_per_frame;

  buffer = gst_buffer_make_writable (buffer);
  gst_buffer_resize (buffer, trim_front * frame_bytes, trim_len * frame_bytes);

  GST_BUFFER_OFFSET (buffer) = out_start;
  GST_BUFFER_OFFSET_END (buffer) = out_stop;
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (out_start, GST_SECOND, WILDMIDI_RATE);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (out_stop, GST_SECOND, WILDMIDI_RATE)
      - GST_BUFFER_TIMESTAMP (buffer);

  return buffer;
}
/* Fragment-finished hook: finalizes decryption, strips PKCS#7 padding
 * from the last decrypted buffer, pushes it downstream, clears pending
 * per-fragment state, and advances to the next fragment on success.
 */
static GstFlowReturn
gst_hls_demux_finish_fragment (GstAdaptiveDemux * demux,
    GstAdaptiveDemuxStream * stream)
{
  GstHLSDemux *hlsdemux = GST_HLS_DEMUX_CAST (demux);
  GstFlowReturn ret = GST_FLOW_OK;

  if (hlsdemux->current_key)
    gst_hls_demux_decrypt_end (hlsdemux);

  if (stream->last_ret == GST_FLOW_OK) {
    if (hlsdemux->pending_decrypted_buffer) {
      if (hlsdemux->current_key) {
        GstMapInfo info;
        gssize unpadded_size;

        /* Handle pkcs7 unpadding here */
        /* NOTE(review): the pad byte is read from decrypted stream data
         * without range-checking against info.size; a corrupt value
         * could yield a negative unpadded_size — verify */
        gst_buffer_map (hlsdemux->pending_decrypted_buffer, &info,
            GST_MAP_READ);
        unpadded_size = info.size - info.data[info.size - 1];
        gst_buffer_unmap (hlsdemux->pending_decrypted_buffer, &info);

        gst_buffer_resize (hlsdemux->pending_decrypted_buffer, 0,
            unpadded_size);
      }

      ret = gst_hls_demux_handle_buffer (demux, stream,
          hlsdemux->pending_decrypted_buffer, TRUE);
      /* ownership passed to handle_buffer */
      hlsdemux->pending_decrypted_buffer = NULL;
    }
  }
  gst_hls_demux_clear_pending_data (hlsdemux);

  if (ret == GST_FLOW_OK || ret == GST_FLOW_NOT_LINKED)
    return gst_adaptive_demux_stream_advance_fragment (demux, stream,
        stream->fragment.duration);
  return ret;
}
/* Clip @buffer to the decoder's segment in sample (DEFAULT) units.
 * Clipping is currently disabled: the buffer is returned untouched and
 * the remainder of the function is intentionally unreachable. */
static GstBuffer *
gst_fluid_dec_clip_buffer (GstFluidDec * fluiddec, GstBuffer * buffer)
{
  guint64 in_start, in_stop;
  guint64 out_start, out_stop;
  gint64 trim_front, trim_len;

  /* clipping disabled for now */
  return buffer;

  in_start = GST_BUFFER_OFFSET (buffer);
  in_stop = GST_BUFFER_OFFSET_END (buffer);

  /* entirely outside the segment: drop */
  if (!gst_segment_clip (&fluiddec->segment, GST_FORMAT_DEFAULT,
          in_start, in_stop, &out_start, &out_stop)) {
    gst_buffer_unref (buffer);
    return NULL;
  }

  /* unchanged by clipping: hand it back as-is */
  if (in_start == out_start && in_stop == out_stop)
    return buffer;

  trim_front = out_start - in_start;
  trim_len = out_stop - out_start;

  buffer = gst_buffer_make_writable (buffer);
  gst_buffer_resize (buffer, trim_front, trim_len);

  GST_BUFFER_OFFSET (buffer) = out_start;
  GST_BUFFER_OFFSET_END (buffer) = out_stop;
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (out_start, GST_SECOND, FLUID_DEC_RATE);
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (out_stop, GST_SECOND, FLUID_DEC_RATE)
      - GST_BUFFER_TIMESTAMP (buffer);

  return buffer;
}
/* Decode one Opus packet (or conceal a missing one) and push the result.
 *
 * @buffer may be NULL / zero-sized, meaning a missing packet: packet loss
 * concealment (PLC) is run instead, with the PLC duration aligned to
 * 2.5 ms multiples and any remainder carried over in
 * dec->leftover_plc_duration.  With in-band FEC enabled, decoding is
 * delayed by one packet (dec->last_buffer) so a lost packet can be
 * reconstructed from the next one.
 */
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gsize size;
  guint8 *data;
  GstBuffer *outbuf, *bufd;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;
  GstMapInfo map, omap;
  GstAudioClippingMeta *cmeta = NULL;

  /* lazily create the decoder on the first packet */
  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;

      if (!gst_opus_dec_negotiate (dec, NULL))
        return GST_FLOW_NOT_NEGOTIATED;
    }

    if (dec->n_channels == 2 && dec->n_streams == 1
        && dec->n_stereo_streams == 0) {
      /* if we are automatically decoding 2 channels, but only have
         a single encoded one, direct both channels to it */
      dec->channel_mapping[1] = 0;
    }

    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_GST_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif

    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state = opus_multistream_decoder_create (dec->sample_rate,
        dec->n_channels, dec->n_streams, dec->n_stereo_streams,
        dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,
        gst_buffer_get_size (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introdude one extra frame's delay as we need
     to potentially wait for next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder. */
  buf = (dec->use_inband_fec
      && gst_buffer_get_size (dec->last_buffer) > 0) ?
      dec->last_buffer : buffer;

  /* That's the buffer we get duration from */
  bufd = dec->use_inband_fec ? dec->last_buffer : buffer;

  if (buf && gst_buffer_get_size (buf) > 0) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  if (gst_buffer_get_size (bufd) == 0) {
    /* missing packet: work out how many samples of PLC to generate */
    GstClockTime const opus_plc_alignment = 2500 * GST_USECOND;
    GstClockTime aligned_missing_duration;
    GstClockTime missing_duration = GST_BUFFER_DURATION (bufd);

    if (!GST_CLOCK_TIME_IS_VALID (missing_duration) || missing_duration == 0) {
      if (GST_CLOCK_TIME_IS_VALID (dec->last_known_buffer_duration)) {
        missing_duration = dec->last_known_buffer_duration;
        GST_WARNING_OBJECT (dec,
            "Missing duration, using last duration %" GST_TIME_FORMAT,
            GST_TIME_ARGS (missing_duration));
      } else {
        GST_WARNING_OBJECT (dec,
            "Missing buffer, but unknown duration, and no previously known duration, assuming 20 ms");
        missing_duration = 20 * GST_MSECOND;
      }
    }

    GST_DEBUG_OBJECT (dec,
        "missing buffer, doing PLC duration %" GST_TIME_FORMAT
        " plus leftover %" GST_TIME_FORMAT,
        GST_TIME_ARGS (missing_duration),
        GST_TIME_ARGS (dec->leftover_plc_duration));

    /* add the leftover PLC duration to that of the buffer */
    missing_duration += dec->leftover_plc_duration;

    /* align the combined buffer and leftover PLC duration to multiples
     * of 2.5ms, rounding to nearest, and store excess duration for later */
    aligned_missing_duration =
        ((missing_duration +
            opus_plc_alignment / 2) / opus_plc_alignment) * opus_plc_alignment;
    dec->leftover_plc_duration = missing_duration - aligned_missing_duration;

    /* Opus' PLC cannot operate with less than 2.5ms; skip PLC
     * and accumulate the missing duration in the leftover_plc_duration
     * for the next PLC attempt */
    if (aligned_missing_duration < opus_plc_alignment) {
      GST_DEBUG_OBJECT (dec,
          "current duration %" GST_TIME_FORMAT
          " of missing data not enough for PLC (minimum needed: %"
          GST_TIME_FORMAT ") - skipping", GST_TIME_ARGS (missing_duration),
          GST_TIME_ARGS (opus_plc_alignment));
      goto done;
    }

    /* convert the duration (in nanoseconds) to sample count */
    samples =
        gst_util_uint64_scale_int (aligned_missing_duration, dec->sample_rate,
        GST_SECOND);
    GST_DEBUG_OBJECT (dec,
        "calculated PLC frame length: %" GST_TIME_FORMAT
        " num frame samples: %d new leftover: %" GST_TIME_FORMAT,
        GST_TIME_ARGS (aligned_missing_duration), samples,
        GST_TIME_ARGS (dec->leftover_plc_duration));
  } else {
    /* use maximum size (120 ms) as the number of returned samples is
       not constant over the stream. */
    samples = 120 * dec->sample_rate / 1000;
  }

  /* 2 bytes per sample per channel (S16 output) */
  packet_size = samples * dec->n_channels * 2;

  outbuf =
      gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
      packet_size);
  if (!outbuf) {
    goto buffer_failed;
  }

  if (size > 0)
    dec->last_known_buffer_duration = packet_duration_opus (data, size);

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = (gint16 *) omap.data;

  /* decode; grow the output buffer in 2.5 ms steps while the decoder
   * reports OPUS_BUFFER_TOO_SMALL, up to the 120 ms maximum */
  do {
    if (dec->use_inband_fec) {
      if (gst_buffer_get_size (dec->last_buffer) > 0) {
        /* normal delayed decode */
        GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
        n = opus_multistream_decode (dec->state, data, size, out_data, samples,
            0);
      } else {
        /* FEC reconstruction decode */
        GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
        n = opus_multistream_decode (dec->state, data, size, out_data, samples,
            1);
      }
    } else {
      /* normal decode */
      GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    }
    if (n == OPUS_BUFFER_TOO_SMALL) {
      /* if too small, add 2.5 milliseconds and try again, up to the
       * Opus max size of 120 milliseconds */
      if (samples >= 120 * dec->sample_rate / 1000)
        break;
      samples += 25 * dec->sample_rate / 10000;
      packet_size = samples * dec->n_channels * 2;
      gst_buffer_unmap (outbuf, &omap);
      gst_buffer_unref (outbuf);
      outbuf =
          gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
          packet_size);
      if (!outbuf) {
        goto buffer_failed;
      }
      gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
      out_data = (gint16 *) omap.data;
    }
  } while (n == OPUS_BUFFER_TOO_SMALL);
  gst_buffer_unmap (outbuf, &omap);
  if (data != NULL)
    gst_buffer_unmap (buf, &map);

  if (n < 0) {
    GstFlowReturn ret = GST_FLOW_ERROR;

    gst_buffer_unref (outbuf);
    GST_AUDIO_DECODER_ERROR (dec, 1, STREAM, DECODE, (NULL),
        ("Decoding error (%d): %s", n, opus_strerror (n)), ret);
    return ret;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  gst_buffer_set_size (outbuf, n * 2 * dec->n_channels);

  GST_BUFFER_DURATION (outbuf) = samples * GST_SECOND / dec->sample_rate;
  samples = n;

  cmeta = gst_buffer_get_audio_clipping_meta (buf);

  g_assert (!cmeta || cmeta->format == GST_FORMAT_DEFAULT);

  /* Skip any samples that need skipping */
  if (cmeta && cmeta->start) {
    /* clipping meta counts in 48 kHz units; scale to our output rate */
    guint pre_skip = cmeta->start;
    guint scaled_pre_skip = pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;

    gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1);

    GST_INFO_OBJECT (dec,
        "Skipping %u samples at the beginning (%u at 48000 Hz)",
        skip, scaled_skip);
  }

  if (cmeta && cmeta->end) {
    guint post_skip = cmeta->end;
    guint scaled_post_skip = post_skip * dec->sample_rate / 48000;
    guint skip = scaled_post_skip > n ? n : scaled_post_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;
    guint outsize = gst_buffer_get_size (outbuf);
    guint skip_bytes = skip * 2 * dec->n_channels;

    if (outsize > skip_bytes)
      outsize -= skip_bytes;
    else
      outsize = 0;

    gst_buffer_resize (outbuf, 0, outsize);
    GST_INFO_OBJECT (dec,
        "Skipping %u samples at the end (%u at 48000 Hz)", skip, scaled_skip);
  }

  if (gst_buffer_get_size (outbuf) == 0) {
    /* fully clipped away */
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) {
    gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16,
        dec->n_channels, dec->opus_pos, dec->info.position);
  }

  /* Apply gain */
  /* Would be better off leaving this to a volume element, as this is
     a naive conversion that does too many int/float conversions.
     However, we don't have control over the pipeline...
     So make it optional if the user program wants to use a volume,
     but do it by default so the correct volume goes out by default */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    gsize rsize;
    unsigned int i, nsamples;
    double volume = dec->r128_gain_volume;
    gint16 *samples;

    gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE);
    samples = (gint16 *) omap.data;
    rsize = omap.size;
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    nsamples = rsize / 2;
    for (i = 0; i < nsamples; ++i) {
      /* scale and clamp to the S16 range */
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
    gst_buffer_unmap (outbuf, &omap);
  }

  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ELEMENT_ERROR (dec, LIBRARY, INIT, ("Failed to create Opus decoder"),
      ("Failed to create Opus decoder (%d): %s", err, opus_strerror (err)));
  return GST_FLOW_ERROR;

buffer_failed:
  GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
      ("Failed to create %u byte buffer", packet_size));
  return GST_FLOW_ERROR;
}
/* GstBaseSrc::fill implementation: read @length bytes from src->fd at
 * @offset into @buf, seeking first when needed. */
static GstFlowReturn
gst_flite_src_fill (GstBaseSrc * basesrc, guint64 offset, guint length,
    GstBuffer * buf)
{
  GstFliteSrc *src;
  guint to_read, bytes_read;
  int ret;
  GstMapInfo info;
  guint8 *data;

  src = GST_FLITE_SRC_CAST (basesrc);

  /* seek only when a specific offset is requested and we're not already
   * there; offset == -1 (all ones in guint64) means "current position" */
  if (G_UNLIKELY (offset != -1 && src->read_position != offset)) {
    off_t res;

    res = lseek (src->fd, offset, SEEK_SET);
    if (G_UNLIKELY (res < 0 || res != offset))
      goto seek_failed;

    src->read_position = offset;
  }

  gst_buffer_map (buf, &info, GST_MAP_WRITE);
  data = info.data;

  bytes_read = 0;
  to_read = length;
  /* keep reading until the request is satisfied; EAGAIN/EINTR retry */
  while (to_read > 0) {
    GST_LOG_OBJECT (src, "Reading %d bytes at offset 0x%" G_GINT64_MODIFIER "x",
        to_read, offset + bytes_read);
    errno = 0;
    ret = read (src->fd, data + bytes_read, to_read);
    if (G_UNLIKELY (ret < 0)) {
      if (errno == EAGAIN || errno == EINTR)
        continue;
      goto could_not_read;
    }

    /* files should eos if they read 0 and more was requested */
    if (G_UNLIKELY (ret == 0)) {
      /* .. but first we should return any remaining data */
      if (bytes_read > 0)
        break;
      goto eos;
    }

    to_read -= ret;
    bytes_read += ret;

    src->read_position += ret;
  }

  gst_buffer_unmap (buf, &info);
  /* short read near EOF: shrink the buffer to what was actually read */
  if (bytes_read != length)
    gst_buffer_resize (buf, 0, bytes_read);

  GST_BUFFER_OFFSET (buf) = offset;
  GST_BUFFER_OFFSET_END (buf) = offset + bytes_read;

  return GST_FLOW_OK;

  /* ERROR */
seek_failed:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    return GST_FLOW_ERROR;
  }
could_not_read:
  {
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    gst_buffer_unmap (buf, &info);
    gst_buffer_resize (buf, 0, 0);
    return GST_FLOW_ERROR;
  }
eos:
  {
    GST_DEBUG ("EOS");
    gst_buffer_unmap (buf, &info);
    gst_buffer_resize (buf, 0, 0);
    return GST_FLOW_EOS;
  }
}
/* Read one image (obj->sizeimage bytes) from the v4l2 device into @buf
 * using the read() I/O method, polling before each attempt.
 *
 * Returns GST_FLOW_OK on success; on error the buffer is resized to 0
 * and the poll/read error is propagated.
 */
static GstFlowReturn
gst_v4l2_do_read (GstV4l2BufferPool * pool, GstBuffer * buf)
{
  GstFlowReturn res;
  GstV4l2Object *obj = pool->obj;
  gint amount;
  GstMapInfo map;
  gint toread;

  toread = obj->sizeimage;

  GST_LOG_OBJECT (pool, "reading %d bytes into buffer %p", toread, buf);

  gst_buffer_map (buf, &map, GST_MAP_WRITE);

  do {
    /* wait until the device has data */
    if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
      goto poll_error;

    amount = v4l2_read (obj->video_fd, map.data, toread);

    if (amount == toread) {
      break;
    } else if (amount == -1) {
      if (errno == EAGAIN || errno == EINTR) {
        continue;
      } else
        goto read_error;
    } else {
      /* short reads can happen if a signal interrupts the read */
      /* NOTE(review): the retry restarts a full-size read at the start
       * of map.data, discarding the partial data — presumably v4l2_read
       * delivers whole frames per call; confirm driver semantics */
      continue;
    }
  } while (TRUE);

  GST_LOG_OBJECT (pool, "read %d bytes", amount);
  gst_buffer_unmap (buf, &map);
  gst_buffer_resize (buf, 0, amount);

  return GST_FLOW_OK;

  /* ERRORS */
poll_error:
  {
    GST_DEBUG ("poll error %s", gst_flow_get_name (res));
    goto cleanup;
  }
read_error:
  {
    GST_ELEMENT_ERROR (obj->element, RESOURCE, READ,
        (_("Error reading %d bytes from device '%s'."), toread, obj->videodev),
        GST_ERROR_SYSTEM);
    res = GST_FLOW_ERROR;
    goto cleanup;
  }
cleanup:
  {
    gst_buffer_unmap (buf, &map);
    gst_buffer_resize (buf, 0, 0);
    return res;
  }
}
/* Encode the samples in @buf with Speex and push one output buffer via
 * gst_audio_encoder_finish_frame.
 *
 * @buf may be NULL (nothing to drain).  If the input is not a whole
 * number of frames (draining), it is copied into a zero-padded scratch
 * buffer (data0) so the last partial frame is encoded with silence
 * appended; data0 is freed on every exit path via the done label.
 */
static GstFlowReturn
gst_speex_enc_encode (GstSpeexEnc * enc, GstBuffer * buf)
{
  gint frame_size = enc->frame_size;
  gint bytes = frame_size * 2 * enc->channels, samples;
  gint outsize, written, dtx_ret = 0;
  GstMapInfo map;
  guint8 *data, *data0 = NULL, *bdata;
  gsize bsize, size;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;

  if (G_LIKELY (buf)) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    bdata = map.data;
    bsize = map.size;

    if (G_UNLIKELY (bsize % bytes)) {
      GST_DEBUG_OBJECT (enc, "draining; adding silence samples");

      /* round up to a whole number of frames; g_malloc0 provides the
       * trailing silence */
      size = ((bsize / bytes) + 1) * bytes;
      data0 = data = g_malloc0 (size);
      memcpy (data, bdata, bsize);
      gst_buffer_unmap (buf, &map);
      /* bdata == NULL marks "input already unmapped" for below */
      bdata = NULL;
    } else {
      data = bdata;
      size = bsize;
    }
  } else {
    GST_DEBUG_OBJECT (enc, "nothing to drain");
    goto done;
  }

  samples = size / (2 * enc->channels);
  speex_bits_reset (&enc->bits);

  /* FIXME what about dropped samples if DTS enabled ?? */

  /* encode frame by frame into enc->bits */
  while (size) {
    GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)", frame_size, bytes);

    if (enc->channels == 2) {
      speex_encode_stereo_int ((gint16 *) data, frame_size, &enc->bits);
    }
    /* dtx_ret stays 0 only if every frame was suppressed (DTX) */
    dtx_ret += speex_encode_int (enc->state, (gint16 *) data, &enc->bits);

    data += bytes;
    size -= bytes;
  }

  speex_bits_insert_terminator (&enc->bits);
  outsize = speex_bits_nbytes (&enc->bits);

  if (bdata)
    gst_buffer_unmap (buf, &map);

#if 0
  ret = gst_pad_alloc_buffer_and_set_caps (GST_AUDIO_ENCODER_SRC_PAD (enc),
      GST_BUFFER_OFFSET_NONE, outsize,
      GST_PAD_CAPS (GST_AUDIO_ENCODER_SRC_PAD (enc)), &outbuf);

  if ((GST_FLOW_OK != ret))
    goto done;
#endif
  outbuf = gst_buffer_new_allocate (NULL, outsize, NULL);
  gst_buffer_map (outbuf, &map, GST_MAP_WRITE);

  written = speex_bits_write (&enc->bits, (gchar *) map.data, outsize);

  if (G_UNLIKELY (written < outsize)) {
    GST_ERROR_OBJECT (enc, "short write: %d < %d bytes", written, outsize);
  } else if (G_UNLIKELY (written > outsize)) {
    GST_ERROR_OBJECT (enc, "overrun: %d > %d bytes", written, outsize);
    written = outsize;
  }
  gst_buffer_unmap (outbuf, &map);
  gst_buffer_resize (outbuf, 0, written);

  /* all frames DTX-suppressed: mark the output as a gap */
  if (!dtx_ret)
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

  ret = gst_audio_encoder_finish_frame (GST_AUDIO_ENCODER (enc),
      outbuf, samples);

done:
  g_free (data0);
  return ret;
}
/* Chain function for the Kate subtitle decoder.
 *
 * Feeds the packet to the base decoder; if it yields a text event,
 * pushes the (optionally markup-stripped) text as a timestamped buffer,
 * and additionally pushes a DVD SPU rendition when the event carries a
 * paletted bitmap.  Always consumes @buf and releases the pad's parent
 * reference.
 */
static GstFlowReturn
gst_kate_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstKateDec *kd = GST_KATE_DEC (gst_pad_get_parent (pad));
  const kate_event *ev = NULL;
  GstFlowReturn rflow = GST_FLOW_OK;

  if (!gst_kate_util_decoder_base_update_segment (&kd->decoder,
          GST_ELEMENT_CAST (kd), buf)) {
    GST_WARNING_OBJECT (kd, "Out of segment!");
    goto not_in_seg;
  }

  rflow =
      gst_kate_util_decoder_base_chain_kate_packet (&kd->decoder,
      GST_ELEMENT_CAST (kd), pad, buf, kd->srcpad, kd->srcpad, &kd->src_caps,
      &ev);
  if (G_UNLIKELY (rflow != GST_FLOW_OK)) {
    gst_object_unref (kd);
    gst_buffer_unref (buf);
    return rflow;
  }

  if (ev) {
    gchar *escaped;
    GstBuffer *buffer;
    size_t len;
    gboolean plain = TRUE;

    if (kd->remove_markup && ev->text_markup_type != kate_markup_none) {
      /* strip markup in-place in a copy of the event text */
      size_t len0 = ev->len + 1;
      escaped = g_strdup (ev->text);
      if (escaped) {
        kate_text_remove_markup (ev->text_encoding, escaped, &len0);
      }
      plain = TRUE;
    } else if (ev->text_markup_type == kate_markup_none) {
      /* no pango markup yet, escape text */
      /* TODO: actually do the pango thing */
      escaped = g_strdup (ev->text);
      plain = TRUE;
    } else {
      escaped = g_strdup (ev->text);
      plain = FALSE;
    }

    if (G_LIKELY (escaped)) {
      len = strlen (escaped);
      if (len > 0) {
        GST_DEBUG_OBJECT (kd, "kate event: %s, escaped %s", ev->text, escaped);
        buffer = gst_buffer_new_and_alloc (len + 1);
        if (G_LIKELY (buffer)) {
          const char *mime = plain ? "text/plain" : "text/x-pango-markup";
          /* NOTE(review): these caps are created and immediately
           * unreffed without being applied to the buffer or pad — looks
           * like a leftover from a pre-1.0 per-buffer-caps API; verify
           * whether they should be set on kd->srcpad instead */
          GstCaps *caps = gst_caps_new_empty_simple (mime);
          gst_caps_unref (caps);
          /* allocate and copy the NULs, but don't include them in passed size */
          gst_buffer_fill (buffer, 0, escaped, len + 1);
          gst_buffer_resize (buffer, 0, len);
          GST_BUFFER_TIMESTAMP (buffer) = ev->start_time * GST_SECOND;
          GST_BUFFER_DURATION (buffer) =
              (ev->end_time - ev->start_time) * GST_SECOND;
          rflow = gst_pad_push (kd->srcpad, buffer);
          if (rflow == GST_FLOW_NOT_LINKED) {
            GST_DEBUG_OBJECT (kd, "source pad not linked, ignored");
          } else if (rflow != GST_FLOW_OK) {
            GST_WARNING_OBJECT (kd, "failed to push buffer: %s",
                gst_flow_get_name (rflow));
          }
        } else {
          GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
              ("Failed to create buffer"));
          rflow = GST_FLOW_ERROR;
        }
      } else {
        GST_WARNING_OBJECT (kd, "Empty string, nothing to do");
        rflow = GST_FLOW_OK;
      }
      g_free (escaped);
    } else {
      GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
          ("Failed to allocate string"));
      rflow = GST_FLOW_ERROR;
    }

    // if there's a background paletted bitmap, construct a DVD SPU for it
    if (ev->bitmap && ev->palette) {
      GstBuffer *buffer = gst_kate_spu_encode_spu (kd, ev);
      if (buffer) {
        GST_BUFFER_TIMESTAMP (buffer) = ev->start_time * GST_SECOND;
        GST_BUFFER_DURATION (buffer) =
            (ev->end_time - ev->start_time) * GST_SECOND;
        rflow = gst_pad_push (kd->srcpad, buffer);
        if (rflow == GST_FLOW_NOT_LINKED) {
          GST_DEBUG_OBJECT (kd, "source pad not linked, ignored");
        } else if (rflow != GST_FLOW_OK) {
          GST_WARNING_OBJECT (kd, "failed to push buffer: %s",
              gst_flow_get_name (rflow));
        }
      } else {
        GST_ELEMENT_ERROR (kd, STREAM, DECODE, (NULL),
            ("failed to create SPU from paletted bitmap"));
        rflow = GST_FLOW_ERROR;
      }
    }
  }

not_in_seg:
  gst_object_unref (kd);
  gst_buffer_unref (buf);
  return rflow;
}
/* Chain function: bzip2-compress @in and push the compressed output
 * downstream in chunks of b->buffer_size.
 *
 * Always consumes @in.  Returns GST_FLOW_FLUSHING when the compressor
 * has not been set up, GST_FLOW_ERROR on a compression failure (after
 * reinitializing the compressor), otherwise the flow result of the
 * last push.
 *
 * Fix: the not_ready path previously jumped to the done label, which
 * calls gst_buffer_unmap() on @in — but @in is only mapped *after* the
 * ready check, so that path unmapped an unmapped buffer through an
 * uninitialized GstMapInfo (undefined behavior).  The not_ready path now
 * releases @in and returns without touching the map.
 */
static GstFlowReturn
gst_bz2enc_chain (GstPad * pad, GstObject * parent, GstBuffer * in)
{
  GstFlowReturn flow = GST_FLOW_OK;
  GstBuffer *out;
  GstBz2enc *b;
  guint n;
  int bz2_ret;
  GstMapInfo map, omap;

  b = GST_BZ2ENC (parent);
  if (!b->ready)
    goto not_ready;

  gst_buffer_map (in, &map, GST_MAP_READ);
  b->stream.next_in = (char *) map.data;
  b->stream.avail_in = map.size;
  while (b->stream.avail_in) {
    out = gst_buffer_new_and_alloc (b->buffer_size);
    gst_buffer_map (out, &omap, GST_MAP_WRITE);
    b->stream.next_out = (char *) omap.data;
    b->stream.avail_out = omap.size;

    bz2_ret = BZ2_bzCompress (&b->stream, BZ_RUN);
    gst_buffer_unmap (out, &omap);
    if (bz2_ret != BZ_RUN_OK)
      goto compress_error;

    n = gst_buffer_get_size (out);
    /* nothing was produced this round: drop the scratch buffer */
    if (b->stream.avail_out >= n) {
      gst_buffer_unref (out);
      break;
    }
    /* shrink to the bytes actually written by the compressor */
    gst_buffer_resize (out, 0, n - b->stream.avail_out);
    n = gst_buffer_get_size (out);

    GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n;

    flow = gst_pad_push (b->src, out);

    if (flow != GST_FLOW_OK)
      break;
    b->offset += n;
  }

done:
  gst_buffer_unmap (in, &map);
  gst_buffer_unref (in);
  return flow;

  /* ERRORS */
not_ready:
  {
    GST_ELEMENT_ERROR (b, LIBRARY, FAILED, (NULL), ("Compressor not ready."));
    /* @in was never mapped here, so do NOT go through done: (it would
     * unmap with an uninitialized GstMapInfo) */
    gst_buffer_unref (in);
    return GST_FLOW_FLUSHING;
  }
compress_error:
  {
    GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL),
        ("Failed to compress data (error code %i)", bz2_ret));
    /* reset the compressor so subsequent buffers start clean */
    gst_bz2enc_compress_init (b);
    gst_buffer_unref (out);
    flow = GST_FLOW_ERROR;
    goto done;
  }
}
/* Decode one Opus packet (or conceal a missing one when @buffer is NULL).
 *
 * Lazily creates the multistream decoder on first use, handles in-band FEC
 * by delaying decode by one packet, trims pre-skip samples, optionally
 * reorders channels and applies R128 gain, then hands the result to the
 * base-class finish_frame. Returns a GstFlowReturn from the base class, or
 * GST_FLOW_ERROR on decoder-creation / decode / allocation failure. */
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
{
  GstFlowReturn res = GST_FLOW_OK;
  gsize size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  int n, err;
  int samples;
  unsigned int packet_size;
  GstBuffer *buf;
  GstMapInfo map, omap;

  /* lazy decoder creation on the first data packet */
  if (dec->state == NULL) {
    /* If we did not get any headers, default to 2 channels */
    if (dec->n_channels == 0) {
      /* NOTE(review): message says "single stream" but this configures one
       * stereo stream with 2 output channels — message wording only */
      GST_INFO_OBJECT (dec, "No header, assuming single stream");
      dec->n_channels = 2;
      dec->sample_rate = 48000;
      /* default stereo mapping */
      dec->channel_mapping_family = 0;
      dec->channel_mapping[0] = 0;
      dec->channel_mapping[1] = 1;
      dec->n_streams = 1;
      dec->n_stereo_streams = 1;
      gst_opus_dec_negotiate (dec, NULL);
    }
    GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
        dec->n_channels, dec->sample_rate);
#ifndef GST_DISABLE_GST_DEBUG
    gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
        "Mapping table", dec->n_channels, dec->channel_mapping);
#endif
    GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
        dec->n_stereo_streams);
    dec->state = opus_multistream_decoder_create (dec->sample_rate,
        dec->n_channels, dec->n_streams, dec->n_stereo_streams,
        dec->channel_mapping, &err);
    if (!dec->state || err != OPUS_OK)
      goto creation_failed;
  }

  if (buffer) {
    GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT,
        gst_buffer_get_size (buffer));
  } else {
    GST_DEBUG_OBJECT (dec, "Received missing buffer");
  }

  /* if using in-band FEC, we introduce one extra frame's delay as we need
     to potentially wait for next buffer to decode a missing buffer */
  if (dec->use_inband_fec && !dec->primed) {
    GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out");
    gst_buffer_replace (&dec->last_buffer, buffer);
    dec->primed = TRUE;
    goto done;
  }

  /* That's the buffer we'll be sending to the opus decoder.
   * NOTE(review): in FEC mode dec->last_buffer can be NULL here (it is
   * replaced with NULL when a missing buffer primed it); calling
   * gst_buffer_get_size() on NULL looks like a potential crash — verify
   * against the callers / upstream fix. */
  buf = (dec->use_inband_fec
      && gst_buffer_get_size (dec->last_buffer) > 0) ?
      dec->last_buffer : buffer;

  if (buf && gst_buffer_get_size (buf) > 0) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size);
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "Using NULL buffer");
    data = NULL;
    size = 0;
  }

  /* use maximum size (120 ms) as the number of returned samples is
     not constant over the stream. */
  samples = 120 * dec->sample_rate / 1000;
  packet_size = samples * dec->n_channels * 2;      /* 2 bytes per S16 sample */
  outbuf =
      gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec),
      packet_size);
  if (!outbuf) {
    goto buffer_failed;
  }

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  out_data = (gint16 *) omap.data;

  if (dec->use_inband_fec) {
    if (dec->last_buffer) {
      /* normal delayed decode */
      GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          0);
    } else {
      /* FEC reconstruction decode: decode_fec=1 rebuilds the lost packet
       * from the redundancy in the following packet */
      GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer");
      n = opus_multistream_decode (dec->state, data, size, out_data, samples,
          1);
    }
  } else {
    /* normal decode */
    GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer");
    n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0);
  }
  gst_buffer_unmap (outbuf, &omap);
  if (data != NULL)
    gst_buffer_unmap (buf, &map);

  if (n < 0) {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
    gst_buffer_unref (outbuf);
    return GST_FLOW_ERROR;
  }
  GST_DEBUG_OBJECT (dec, "decoded %d samples", n);
  gst_buffer_set_size (outbuf, n * 2 * dec->n_channels);

  /* Skip any samples that need skipping: pre_skip is kept at 48 kHz units,
   * so scale to the actual sample rate before trimming */
  if (dec->pre_skip > 0) {
    guint scaled_pre_skip = dec->pre_skip * dec->sample_rate / 48000;
    guint skip = scaled_pre_skip > n ? n : scaled_pre_skip;
    guint scaled_skip = skip * 48000 / dec->sample_rate;
    gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1);
    dec->pre_skip -= scaled_skip;
    GST_INFO_OBJECT (dec,
        "Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
        scaled_skip, dec->pre_skip);
  }

  if (gst_buffer_get_size (outbuf) == 0) {
    /* everything was pre-skip: finish the frame with a NULL buffer below */
    gst_buffer_unref (outbuf);
    outbuf = NULL;
  } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) {
    gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16,
        dec->n_channels, dec->opus_pos, dec->info.position);
  }

  /* Apply gain */
  /* Would be better off leaving this to a volume element, as this is
     a naive conversion that does too many int/float conversions.
     However, we don't have control over the pipeline...
     So make it optional if the user program wants to use a volume,
     but do it by default so the correct volume goes out by default */
  if (dec->apply_gain && outbuf && dec->r128_gain) {
    gsize rsize;
    unsigned int i, nsamples;
    double volume = dec->r128_gain_volume;
    gint16 *samples;
    gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE);
    samples = (gint16 *) omap.data;
    rsize = omap.size;
    GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume);
    nsamples = rsize / 2;
    for (i = 0; i < nsamples; ++i) {
      /* scale and clamp each S16 sample to [-32768, 32767] */
      int sample = (int) (samples[i] * volume + 0.5);
      samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample;
    }
    gst_buffer_unmap (outbuf, &omap);
  }

  /* remember the packet just handled so the next call can decode it (or
   * reconstruct it via FEC if the next packet is missing) */
  if (dec->use_inband_fec) {
    gst_buffer_replace (&dec->last_buffer, buffer);
  }

  res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

done:
  return res;

creation_failed:
  GST_ERROR_OBJECT (dec, "Failed to create Opus decoder: %d", err);
  return GST_FLOW_ERROR;

buffer_failed:
  GST_ERROR_OBJECT (dec, "Failed to create %u byte buffer", packet_size);
  return GST_FLOW_ERROR;
}
/* Produce one buffer for @pad: either read whatever is available from the
 * connected client socket (up to MAX_READ_SIZE bytes), or, in loop mode
 * with a fill policy, synthesize a 1024-byte buffer.
 *
 * On success *@outbuf holds a new buffer and GST_FLOW_OK is returned.
 * Socket errors / hangups reset the pad and return GST_FLOW_ERROR,
 * GST_FLOW_EOS or GST_FLOW_FLUSHING as appropriate (or loop again in
 * MODE_LOOP).
 *
 * FIXES vs. original:
 *  - FILL_ZERO returned a buffer of uninitialized memory
 *    (gst_buffer_new_and_alloc() does not zero); it is now memset to 0.
 *  - FILL_RAND mapped the buffer and never unmapped it.
 *  - dead "#if 0" code removed. */
static GstFlowReturn
gst_tcp_mix_src_pad_read (GstTCPMixSrcPad * pad, GstBuffer ** outbuf)
{
  GstTCPMixSrc *src = GST_TCP_MIX_SRC (GST_PAD_PARENT (pad));
  gssize avail, receivedBytes;
  GstMapInfo map;
  GError *err = NULL;

  /* if we have a client, wait for read */
  GST_LOG_OBJECT (pad, "asked for a buffer");
  if (!pad->client) {
    if (src->mode == MODE_LOOP)
      goto loop_read;
    else
      goto no_client;
  }

  /* read the buffer header */
read_available_bytes:
  avail = g_socket_get_available_bytes (pad->client);
  if (avail < 0) {
    goto socket_get_available_bytes_error;
  } else if (avail == 0) {
    /* nothing buffered yet: block until the socket becomes readable,
     * errors out, or the peer hangs up */
    GIOCondition condition;
    if (!g_socket_condition_wait (pad->client,
            G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP, pad->cancellable, &err))
      goto socket_condition_wait_error;
    condition = g_socket_condition_check (pad->client,
        G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP);
    if ((condition & G_IO_ERR))
      goto socket_condition_error;
    else if ((condition & G_IO_HUP))
      goto socket_condition_hup;
    avail = g_socket_get_available_bytes (pad->client);
    if (avail < 0)
      goto socket_get_available_bytes_error;
  }

  if (0 < avail) {
    gsize readBytes = MIN (avail, MAX_READ_SIZE);
    *outbuf = gst_buffer_new_and_alloc (readBytes);
    gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE);
    receivedBytes = g_socket_receive (pad->client, (gchar *) map.data,
        readBytes, pad->cancellable, &err);
  } else {
    /* Connection closed */
    receivedBytes = 0;
    *outbuf = NULL;
  }

  if (receivedBytes == 0)
    goto socket_connection_closed;
  else if (receivedBytes < 0)
    goto socket_receive_error;

  gst_buffer_unmap (*outbuf, &map);
  /* shrink to the bytes actually received */
  gst_buffer_resize (*outbuf, 0, receivedBytes);
  g_clear_error (&err);
  return GST_FLOW_OK;

  /* Handling Errors */
no_client:
  {
    GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL),
        ("No client socket (%s)", GST_PAD_NAME (pad)));
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_ERROR;
  }
socket_get_available_bytes_error:
  {
    GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL),
        ("Failed to get available bytes from socket"));
    gst_tcp_mix_src_pad_reset (pad);
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_ERROR;
  }
socket_condition_wait_error:
  {
    GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL),
        ("Select failed: %s", err->message));
    g_clear_error (&err);
    gst_tcp_mix_src_pad_reset (pad);
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_ERROR;
  }
socket_condition_error:
  {
    GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("Socket in error state"));
    *outbuf = NULL;
    gst_tcp_mix_src_pad_reset (pad);
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_ERROR;
  }
socket_condition_hup:
  {
    GST_DEBUG_OBJECT (pad, "Connection closed");
    *outbuf = NULL;
    gst_tcp_mix_src_pad_reset (pad);
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_EOS;
  }
socket_connection_closed:
  {
    GST_DEBUG_OBJECT (pad, "Connection closed");
    if (*outbuf) {
      gst_buffer_unmap (*outbuf, &map);
      gst_buffer_unref (*outbuf);
    }
    *outbuf = NULL;
    gst_tcp_mix_src_pad_reset (pad);
    if (src->mode == MODE_LOOP)
      goto loop_read;
    return GST_FLOW_EOS;
  }
socket_receive_error:
  {
    gst_buffer_unmap (*outbuf, &map);
    gst_buffer_unref (*outbuf);
    *outbuf = NULL;
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      GST_DEBUG_OBJECT (pad, "Cancelled reading from socket");
      if (src->mode == MODE_LOOP)
        goto loop_read;
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL),
          ("Failed to read from socket: %s", err->message));
      if (src->mode == MODE_LOOP)
        goto loop_read;
      return GST_FLOW_ERROR;
    }
  }
loop_read:
  {
    if (src->fill == FILL_NONE) {
      /* no filler data wanted: block until a client connects and retry */
      gst_tcp_mix_src_pad_wait_for_client (pad);
      goto read_available_bytes;
    }

    enum { buffer_size = 1024 };
    *outbuf = gst_buffer_new_and_alloc (buffer_size);
    switch (src->fill) {
      case FILL_ZERO:
        /* gst_buffer_new_and_alloc() memory is uninitialized: zero it */
        gst_buffer_memset (*outbuf, 0, 0, buffer_size);
        break;
      case FILL_RAND:{
        guchar *p;
        gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE);
        for (p = map.data; p < map.data + buffer_size; p += 4) {
          *((int *) p) = rand ();
        }
        /* was missing: release the mapping before handing the buffer out */
        gst_buffer_unmap (*outbuf, &map);
        break;
      }
      default:
        break;
    }
    return GST_FLOW_OK;
  }
}
/*
 * Read a new buffer from src->reqoffset, takes care of events
 * and seeking and such.
 *
 * Fills one blocksize-sized buffer from the RTMP connection, looping over
 * short reads. Returns GST_FLOW_OK with *buffer set, GST_FLOW_EOS when the
 * very first read returns 0 bytes, or GST_FLOW_ERROR on a read failure.
 *
 * FIX: the eos/read_failed paths unreffed the buffer while it was still
 * mapped; both now unmap first.
 */
static GstFlowReturn
gst_rtmp_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
{
  GstRTMPSrc *src;
  GstBuffer *buf;
  GstMapInfo map;
  guint8 *data;
  guint todo;
  gsize bsize;
  int read;
  int size;

  src = GST_RTMP_SRC (pushsrc);
  g_return_val_if_fail (src->rtmp != NULL, GST_FLOW_ERROR);

  size = GST_BASE_SRC_CAST (pushsrc)->blocksize;

  GST_DEBUG ("reading from %" G_GUINT64_FORMAT
      ", size %u", src->cur_offset, size);

  buf = gst_buffer_new_allocate (NULL, size, NULL);
  if (G_UNLIKELY (buf == NULL)) {
    GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", size);
    return GST_FLOW_ERROR;
  }

  todo = size;                  /* bytes still wanted */
  bsize = 0;                    /* bytes accumulated so far */
  gst_buffer_map (buf, &map, GST_MAP_WRITE);
  data = map.data;

  while (todo > 0) {
    read = RTMP_Read (src->rtmp, (char *) data, todo);

    if (G_UNLIKELY (read == 0 && todo == size)) {
      /* 0 bytes on the very first read: stream ended */
      goto eos;
    } else if (G_UNLIKELY (read == 0)) {
      /* short final read: return what we have */
      break;
    }
    if (G_UNLIKELY (read < 0))
      goto read_failed;

    data += read;
    todo -= read;
    bsize += read;
    GST_LOG (" got size %d", read);
  }
  gst_buffer_unmap (buf, &map);
  gst_buffer_resize (buf, 0, bsize);

  if (src->discont) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    src->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (buf) = src->last_timestamp;
  GST_BUFFER_OFFSET (buf) = src->cur_offset;

  /* NOTE(review): offset advances by the full blocksize even on a short
   * read (bsize < size) — matches the original behavior, verify intent */
  src->cur_offset += size;
  if (src->last_timestamp == GST_CLOCK_TIME_NONE)
    src->last_timestamp = src->rtmp->m_mediaStamp * GST_MSECOND;
  else
    src->last_timestamp = MAX (src->last_timestamp,
        src->rtmp->m_mediaStamp * GST_MSECOND);

  GST_LOG_OBJECT (src, "Created buffer of size %u at %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, size, GST_BUFFER_OFFSET (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  /* we're done, return the buffer */
  *buffer = buf;

  return GST_FLOW_OK;

read_failed:
  {
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL), ("Failed to read data"));
    return GST_FLOW_ERROR;
  }
eos:
  {
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    GST_DEBUG_OBJECT (src, "Reading data gave EOS");
    return GST_FLOW_EOS;
  }
}
/* Payload one raw video frame into RFC 4175 RTP packets.
 *
 * For each field (two fields when interlaced), walks the frame line by
 * line, packing as many (partial) lines as fit into an MTU-sized packet.
 * Each packet gets a 2-byte extended sequence number followed by one or
 * more 6-byte line headers (length / line number / offset + continuation
 * bit), written in a first pass, then the pixel data is copied in a second
 * pass driven by those same headers.
 *
 * Takes ownership of @buffer. Returns the flow result of the last push,
 * or GST_FLOW_NOT_SUPPORTED for unhandled pixel formats. */
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  guint line, offset;
  guint8 *yp, *up, *vp;
  guint ystride, uvstride;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field;
  GstVideoFrame frame;
  gint interlaced;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  /* start with line 0, offset 0 */
  for (field = 0; field < 1 + interlaced; field++) {
    line = field;
    offset = 0;

    /* write all lines */
    while (line < height) {
      guint left;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      /* second field of an interlaced frame is stamped half a frame later */
      if (field == 0) {
        GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer);
      } else {
        GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* first pass: lay down the 6-byte line headers */
      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how may bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / rtpvrawpay->xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * rtpvrawpay->xinc;
          length = (pixels * pgroup) / rtpvrawpay->xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no: top bit (F) carries the field number */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to next line we do this here to make the check below easier */
          line += rtpvrawpay->yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin,
            offs, cont);

        switch (GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo)) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            /* packed formats: a straight memcpy of the line segment */
            offs /= rtpvrawpay->xinc;
            memcpy (outdata, yp + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            /* reorder AYUV samples to U Y V on the wire */
            datap = yp + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            /* interleave two luma rows with the shared chroma row */
            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff =
                (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff =
                (lin / rtpvrawpay->yinc * uvstride) + (offs / rtpvrawpay->xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
      }
      gst_rtp_buffer_unmap (&rtp);
      if (left > 0) {
        /* trim the unused tail of the pre-sized packet */
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* push buffer */
      ret = gst_rtp_base_payload_push (payload, out);
    }
  }
  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT, (NULL),
        ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
/* Chain function of the FLAC tagger: accumulates input in an adapter and
 * walks a small state machine over the FLAC stream layout — "fLaC" magic,
 * then metadata blocks, replacing the VORBISCOMMENT block with one built
 * from the element's/user's tags before passing audio through.
 *
 * NOTE(review): the labels cleanup / no_buffer / no_comment /
 * comment_too_long referenced below are defined later in this function,
 * beyond the visible excerpt. */
static GstFlowReturn
gst_flac_tag_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstFlacTag *tag;
  GstFlowReturn ret;
  GstMapInfo map;
  gsize size;

  ret = GST_FLOW_OK;
  tag = GST_FLAC_TAG (parent);
  gst_adapter_push (tag->adapter, buffer);

  /* Initial state, we don't even know if we are dealing with a flac file */
  if (tag->state == GST_FLAC_TAG_STATE_INIT) {
    GstBuffer *id_buffer;

    /* NOTE(review): sizeof (FLAC_MAGIC) counts the string's NUL terminator
     * while only FLAC_MAGIC_SIZE bytes are taken below — this waits for one
     * byte more than is consumed; verify whether FLAC_MAGIC_SIZE was meant */
    if (gst_adapter_available (tag->adapter) < sizeof (FLAC_MAGIC))
      goto cleanup;

    id_buffer = gst_adapter_take_buffer (tag->adapter, FLAC_MAGIC_SIZE);
    GST_DEBUG_OBJECT (tag, "looking for " FLAC_MAGIC " identifier");
    if (gst_buffer_memcmp (id_buffer, 0, FLAC_MAGIC, FLAC_MAGIC_SIZE) == 0) {

      GST_DEBUG_OBJECT (tag, "pushing " FLAC_MAGIC " identifier buffer");
      ret = gst_pad_push (tag->srcpad, id_buffer);
      if (ret != GST_FLOW_OK)
        goto cleanup;

      tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
    } else {
      /* FIXME: does that work well with FLAC files containing ID3v2 tags ? */
      gst_buffer_unref (id_buffer);
      GST_ELEMENT_ERROR (tag, STREAM, WRONG_TYPE, (NULL), (NULL));
      ret = GST_FLOW_ERROR;
    }
  }

  /* The fLaC magic string has been skipped, try to detect the beginning
   * of a metadata block */
  if (tag->state == GST_FLAC_TAG_STATE_METADATA_BLOCKS) {
    guint type;
    gboolean is_last;
    const guint8 *block_header;

    /* invariant: no partially-read block is pending in this state */
    g_assert (tag->metadata_block_size == 0);
    g_assert (tag->metadata_last_block == FALSE);

    /* The header of a flac metadata block is 4 bytes long:
     * 1st bit: indicates whether this is the last metadata info block
     * 7 next bits: 4 if vorbis comment block
     * 24 next bits: size of the metadata to follow (big endian)
     */
    if (gst_adapter_available (tag->adapter) < 4)
      goto cleanup;

    block_header = gst_adapter_map (tag->adapter, 4);

    is_last = ((block_header[0] & 0x80) == 0x80);
    type = block_header[0] & 0x7F;
    size = (block_header[1] << 16) | (block_header[2] << 8) | block_header[3];
    gst_adapter_unmap (tag->adapter);

    /* The 4 bytes long header isn't included in the metadata size */
    tag->metadata_block_size = size + 4;
    tag->metadata_last_block = is_last;

    GST_DEBUG_OBJECT (tag,
        "got metadata block: %" G_GSIZE_FORMAT " bytes, type %d, "
        "is vorbiscomment: %d, is last: %d", size, type, (type == 0x04),
        is_last);

    /* Metadata blocks of type 4 are vorbis comment blocks */
    if (type == 0x04) {
      tag->state = GST_FLAC_TAG_STATE_VC_METADATA_BLOCK;
    } else {
      tag->state = GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK;
    }
  }

  /* Reads a metadata block */
  if ((tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) ||
      (tag->state == GST_FLAC_TAG_STATE_VC_METADATA_BLOCK)) {
    GstBuffer *metadata_buffer;

    /* wait until the whole block (header included) is available */
    if (gst_adapter_available (tag->adapter) < tag->metadata_block_size)
      goto cleanup;

    metadata_buffer = gst_adapter_take_buffer (tag->adapter,
        tag->metadata_block_size);

    /* clear the is-last flag, as the last metadata block will
     * be the vorbis comment block which we will build ourselves.
     */
    gst_buffer_map (metadata_buffer, &map, GST_MAP_READWRITE);
    map.data[0] &= (~0x80);
    gst_buffer_unmap (metadata_buffer, &map);

    if (tag->state == GST_FLAC_TAG_STATE_WRITING_METADATA_BLOCK) {
      /* non-comment blocks are forwarded unchanged */
      GST_DEBUG_OBJECT (tag, "pushing metadata block buffer");
      ret = gst_pad_push (tag->srcpad, metadata_buffer);
      if (ret != GST_FLOW_OK)
        goto cleanup;
    } else {
      /* the vorbiscomment block is held back and replaced later */
      tag->vorbiscomment = metadata_buffer;
    }
    tag->metadata_block_size = 0;
    tag->state = GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK;
  }

  /* This state is mainly used to be able to stop as soon as we read
   * a vorbiscomment block from the flac file if we are in an only output
   * tags mode */
  if (tag->state == GST_FLAC_TAG_STATE_METADATA_NEXT_BLOCK) {
    /* Check if in the previous iteration we read a vorbis comment metadata
     * block, and stop now if the user only wants to read tags */
    if (tag->vorbiscomment != NULL) {
      guint8 id_data[4];
      /* We found some tags, try to parse them and notify the other elements
       * that we encountered some tags */
      GST_DEBUG_OBJECT (tag, "emitting vorbiscomment tags");
      gst_buffer_extract (tag->vorbiscomment, 0, id_data, 4);
      tag->tags = gst_tag_list_from_vorbiscomment_buffer (tag->vorbiscomment,
          id_data, 4, NULL);
      if (tag->tags != NULL) {
        gst_pad_push_event (tag->srcpad,
            gst_event_new_tag (gst_tag_list_copy (tag->tags)));
      }

      gst_buffer_unref (tag->vorbiscomment);
      tag->vorbiscomment = NULL;
    }

    /* Skip to next state */
    if (tag->metadata_last_block == FALSE) {
      tag->state = GST_FLAC_TAG_STATE_METADATA_BLOCKS;
    } else {
      tag->state = GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT;
    }
  }

  /* Creates a vorbis comment block from the metadata which was set
   * on the gstreamer element, and add it to the flac stream */
  if (tag->state == GST_FLAC_TAG_STATE_ADD_VORBIS_COMMENT) {
    GstBuffer *buffer;
    const GstTagList *user_tags;
    GstTagList *merged_tags;

    /* merge the tag lists */
    user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (tag));
    if (user_tags != NULL) {
      merged_tags = gst_tag_list_merge (user_tags, tag->tags,
          gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (tag)));
    } else {
      merged_tags = gst_tag_list_copy (tag->tags);
    }

    if (merged_tags == NULL) {
      /* If we get a NULL list of tags, we must generate a padding block
       * which is marked as the last metadata block, otherwise we'll
       * end up with a corrupted flac file.
       */
      GST_WARNING_OBJECT (tag, "No tags found");
      buffer = gst_buffer_new_and_alloc (12);
      if (buffer == NULL)
        goto no_buffer;

      gst_buffer_map (buffer, &map, GST_MAP_WRITE);
      memset (map.data, 0, map.size);
      map.data[0] = 0x81;       /* 0x80 = Last metadata block,
                                 * 0x01 = padding block */
      gst_buffer_unmap (buffer, &map);
    } else {
      guchar header[4];
      guint8 fbit[1];

      memset (header, 0, sizeof (header));
      header[0] = 0x84;         /* 0x80 = Last metadata block,
                                 * 0x04 = vorbiscomment block */
      buffer = gst_tag_list_to_vorbiscomment_buffer (merged_tags, header,
          sizeof (header), NULL);
      GST_DEBUG_OBJECT (tag, "Writing tags %" GST_PTR_FORMAT, merged_tags);
      gst_tag_list_free (merged_tags);
      if (buffer == NULL)
        goto no_comment;

      size = gst_buffer_get_size (buffer);
      /* block length is stored in 24 bits, minus the 4-byte header */
      if ((size < 4) || ((size - 4) > 0xFFFFFF))
        goto comment_too_long;

      fbit[0] = 1;
      /* Get rid of the framing bit at the end of the vorbiscomment buffer
       * if it exists since libFLAC seems to lose sync because of this
       * bit in gstflacdec */
      if (gst_buffer_memcmp (buffer, size - 1, fbit, 1) == 0) {
        buffer = gst_buffer_make_writable (buffer);
        gst_buffer_resize (buffer, 0, size - 1);
      }
    }

    /* The 4 byte metadata block header isn't accounted for in the total
     * size of the metadata block */
    gst_buffer_map (buffer, &map, GST_MAP_WRITE);
    /* write the 24-bit big-endian payload length into the block header */
    map.data[1] = (((map.size - 4) & 0xFF0000) >> 16);
    map.data[2] = (((map.size - 4) & 0x00FF00) >> 8);
    map.data[3] = ((map.size - 4) & 0x0000FF);
    gst_buffer_unmap (buffer, &map);
    GST_DEBUG_OBJECT (tag, "pushing %" G_GSIZE_FORMAT " byte vorbiscomment "
        "buffer", map.size);

    ret = gst_pad_push (tag->srcpad, buffer);
    if (ret != GST_FLOW_OK) {
      goto cleanup;
    }
    tag->state = GST_FLAC_TAG_STATE_AUDIO_DATA;
  }
/* Depayload one RDT data packet: prepend a 12-byte header (version,
 * length, stream id, timestamp, flags), copy the packet payload behind it
 * and push the result, tracking sequence-number gaps to flag discont or to
 * drop old duplicates.
 *
 * FIXES vs. original:
 *  - the "dropping" path leaked @outbuf (allocated before the seqnum
 *    check) and left the RDT packet data mapped; both are released now.
 *  - removed a no-op gst_buffer_resize() to the exact allocated size. */
static GstFlowReturn
gst_rdt_depay_handle_data (GstRDTDepay * rdtdepay, GstClockTime outtime,
    GstRDTPacket * packet)
{
  GstFlowReturn ret;
  GstBuffer *outbuf;
  GstMapInfo outmap;
  guint8 *data, *outdata;
  guint size;
  guint16 stream_id;
  guint32 timestamp;
  gint gap;
  guint16 seqnum;
  guint8 flags;
  guint16 outflags;

  /* get pointers to the packet data */
  data = gst_rdt_packet_data_map (packet, &size);

  outbuf = gst_buffer_new_and_alloc (12 + size);
  GST_BUFFER_TIMESTAMP (outbuf) = outtime;

  GST_DEBUG_OBJECT (rdtdepay, "have size %u", size);

  /* copy over some things */
  stream_id = gst_rdt_packet_data_get_stream_id (packet);
  timestamp = gst_rdt_packet_data_get_timestamp (packet);
  flags = gst_rdt_packet_data_get_flags (packet);

  seqnum = gst_rdt_packet_data_get_seq (packet);

  GST_DEBUG_OBJECT (rdtdepay, "stream_id %u, timestamp %u, seqnum %d, flags %d",
      stream_id, timestamp, seqnum, flags);

  if (rdtdepay->next_seqnum != -1) {
    gap = gst_rdt_buffer_compare_seqnum (seqnum, rdtdepay->next_seqnum);

    /* if we have no gap, all is fine */
    if (G_UNLIKELY (gap != 0)) {
      GST_LOG_OBJECT (rdtdepay, "got packet %u, expected %u, gap %d", seqnum,
          rdtdepay->next_seqnum, gap);
      if (gap < 0) {
        /* seqnum > next_seqnum, we are missing some packets, this is always a
         * DISCONT. */
        GST_LOG_OBJECT (rdtdepay, "%d missing packets", gap);
        rdtdepay->discont = TRUE;
      } else {
        /* seqnum < next_seqnum, we have seen this packet before or the sender
         * could be restarted. If the packet is not too old, we throw it away as
         * a duplicate, otherwise we mark discont and continue. 100 misordered
         * packets is a good threshold. See also RFC 4737.
         */
        if (gap < 100)
          goto dropping;

        GST_LOG_OBJECT (rdtdepay,
            "%d > 100, packet too old, sender likely restarted", gap);
        rdtdepay->discont = TRUE;
      }
    }
  }
  /* RDT sequence numbers wrap at 0xff00 */
  rdtdepay->next_seqnum = (seqnum + 1);
  if (rdtdepay->next_seqnum == 0xff00)
    rdtdepay->next_seqnum = 0;

  if ((flags & 1) == 0)
    outflags = 2;
  else
    outflags = 0;

  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
  outdata = outmap.data;
  GST_WRITE_UINT16_BE (outdata + 0, 0); /* version   */
  GST_WRITE_UINT16_BE (outdata + 2, size + 12); /* length    */
  GST_WRITE_UINT16_BE (outdata + 4, stream_id); /* stream    */
  GST_WRITE_UINT32_BE (outdata + 6, timestamp); /* timestamp */
  GST_WRITE_UINT16_BE (outdata + 10, outflags); /* flags     */
  memcpy (outdata + 12, data, size);
  gst_buffer_unmap (outbuf, &outmap);

  gst_rdt_packet_data_unmap (packet);

  GST_DEBUG_OBJECT (rdtdepay, "Pushing packet, outtime %" GST_TIME_FORMAT,
      GST_TIME_ARGS (outtime));

  ret = gst_rdt_depay_push (rdtdepay, outbuf);

  return ret;

  /* ERRORS */
dropping:
  {
    GST_WARNING_OBJECT (rdtdepay, "%d <= 100, dropping old packet", gap);
    /* release the output buffer allocated above and the packet mapping */
    gst_buffer_unref (outbuf);
    gst_rdt_packet_data_unmap (packet);
    return GST_FLOW_OK;
  }
}
/* Create function of the UDP source: wait (with optional timeout) for a
 * datagram, receive it via scatter I/O into one or two pre-mapped memory
 * chunks, wrap the result in a GstBuffer, optionally skip leading bytes and
 * attach the sender's address as buffer meta.
 *
 * Relies on gst_udpsrc_ensure_mem() having set up udpsrc->mem / mem_max
 * and the matching udpsrc->vec[0..1] entries; both chunks are unmapped and
 * cleared here so the next call allocates fresh ones. */
static GstFlowReturn
gst_udpsrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
  GstUDPSrc *udpsrc;
  GstBuffer *outbuf = NULL;
  GSocketAddress *saddr = NULL;
  gint flags = G_SOCKET_MSG_NONE;
  gboolean try_again;
  GError *err = NULL;
  gssize res;
  gsize offset;

  udpsrc = GST_UDPSRC_CAST (psrc);

  if (!gst_udpsrc_ensure_mem (udpsrc))
    goto memory_alloc_error;

retry:
  /* wait for data to arrive; on a configured timeout, post an element
   * message and keep waiting */
  do {
    gint64 timeout;

    try_again = FALSE;

    /* udpsrc->timeout is in nanoseconds; the wait takes microseconds */
    if (udpsrc->timeout)
      timeout = udpsrc->timeout / 1000;
    else
      timeout = -1;

    GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GINT64_FORMAT,
        timeout);

    if (!g_socket_condition_timed_wait (udpsrc->used_socket,
            G_IO_IN | G_IO_PRI, timeout, udpsrc->cancellable, &err)) {
      if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
          || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
        goto stopped;
      } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) {
        g_clear_error (&err);
        /* timeout, post element message */
        gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
            gst_message_new_element (GST_OBJECT_CAST (udpsrc),
                gst_structure_new ("GstUDPSrcTimeout",
                    "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
      } else {
        goto select_error;
      }

      try_again = TRUE;
    }
  } while (G_UNLIKELY (try_again));

  if (saddr != NULL) {
    g_object_unref (saddr);
    saddr = NULL;
  }

  /* scatter-receive into the two pre-mapped chunks */
  res =
      g_socket_receive_message (udpsrc->used_socket, &saddr, udpsrc->vec, 2,
      NULL, NULL, &flags, udpsrc->cancellable, &err);

  if (G_UNLIKELY (res < 0)) {
    /* EHOSTUNREACH for a UDP socket means that a packet sent with udpsink
     * generated a "port unreachable" ICMP response. We ignore that and try
     * again.
     */
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE)) {
      g_clear_error (&err);
      goto retry;
    }
    goto receive_error;
  }

  /* remember maximum packet size */
  if (res > udpsrc->max_size)
    udpsrc->max_size = res;

  outbuf = gst_buffer_new ();

  /* append first memory chunk to buffer */
  gst_buffer_append_memory (outbuf, udpsrc->mem);

  /* if the packet didn't fit into the first chunk, add second one as well */
  if (res > udpsrc->map.size) {
    gst_buffer_append_memory (outbuf, udpsrc->mem_max);
    gst_memory_unmap (udpsrc->mem_max, &udpsrc->map_max);
    udpsrc->vec[1].buffer = NULL;
    udpsrc->vec[1].size = 0;
    udpsrc->mem_max = NULL;
  }

  /* make sure we allocate a new chunk next time (we do this only here because
   * we look at map.size to see if the second memory chunk is needed above) */
  gst_memory_unmap (udpsrc->mem, &udpsrc->map);
  udpsrc->vec[0].buffer = NULL;
  udpsrc->vec[0].size = 0;
  udpsrc->mem = NULL;

  /* optionally drop a fixed-size header from the start of the packet.
   * NOTE(review): `res < offset` compares gssize against gsize — res is
   * known non-negative here, so the promotion is harmless, but worth
   * confirming if skip_first_bytes can be very large */
  offset = udpsrc->skip_first_bytes;

  if (G_UNLIKELY (offset > 0 && res < offset))
    goto skip_error;

  gst_buffer_resize (outbuf, offset, res - offset);

  /* use buffer metadata so receivers can also track the address */
  if (saddr) {
    gst_buffer_add_net_address_meta (outbuf, saddr);
    g_object_unref (saddr);
    saddr = NULL;
  }

  GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res);

  *buf = GST_BUFFER_CAST (outbuf);

  return GST_FLOW_OK;

  /* ERRORS */
memory_alloc_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("Failed to allocate or map memory"));
    return GST_FLOW_ERROR;
  }
select_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("select error: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
stopped:
  {
    GST_DEBUG ("stop called");
    g_clear_error (&err);
    return GST_FLOW_FLUSHING;
  }
receive_error:
  {
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
        || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      g_clear_error (&err);
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));
      g_clear_error (&err);
      return GST_FLOW_ERROR;
    }
  }
skip_error:
  {
    gst_buffer_unref (outbuf);

    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),
        ("UDP buffer to small to skip header"));
    return GST_FLOW_ERROR;
  }
}
/* Pad event handler for the bzip2 encoder element.
 *
 * On EOS the data still buffered inside the libbz2 stream state is drained:
 * BZ2_bzCompress() is called repeatedly with BZ_FINISH, each produced chunk
 * is trimmed to its actual size and pushed downstream, until BZ_STREAM_END
 * is returned.  The event is then forwarded and the compressor is
 * re-initialised for a possible next stream.  All other events are simply
 * forwarded with the default handler.
 *
 * Returns TRUE if the event was handled successfully.
 */
static gboolean
gst_bz2enc_event (GstPad * pad, GstObject * parent, GstEvent * e)
{
  GstBz2enc *b;
  gboolean ret;

  b = GST_BZ2ENC (parent);
  switch (GST_EVENT_TYPE (e)) {
    case GST_EVENT_EOS:{
      /* FIX: 'flow' was previously uninitialized.  If the drain loop below
       * exits before the first gst_pad_push() (compress error, or no output
       * produced), the 'flow != GST_FLOW_OK' check after the loop read an
       * indeterminate value — undefined behavior. */
      GstFlowReturn flow = GST_FLOW_OK;
      int r = BZ_FINISH_OK;

      do {
        GstBuffer *out;
        GstMapInfo omap;
        guint n;

        /* hand libbz2 a fresh chunk-sized output buffer */
        out = gst_buffer_new_and_alloc (b->buffer_size);
        gst_buffer_map (out, &omap, GST_MAP_WRITE);
        b->stream.next_out = (char *) omap.data;
        b->stream.avail_out = omap.size;
        r = BZ2_bzCompress (&b->stream, BZ_FINISH);
        gst_buffer_unmap (out, &omap);
        if ((r != BZ_FINISH_OK) && (r != BZ_STREAM_END)) {
          GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL),
              ("Failed to finish to compress (error code %i).", r));
          gst_buffer_unref (out);
          break;
        }

        n = gst_buffer_get_size (out);
        if (b->stream.avail_out >= n) {
          /* nothing was written into the buffer this round */
          gst_buffer_unref (out);
          break;
        }

        /* trim to the number of bytes libbz2 actually produced */
        gst_buffer_resize (out, 0, n - b->stream.avail_out);
        n = gst_buffer_get_size (out);
        GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n;

        flow = gst_pad_push (b->src, out);

        if (flow != GST_FLOW_OK) {
          GST_DEBUG_OBJECT (b, "push on EOS failed: %s",
              gst_flow_get_name (flow));
          break;
        }
      } while (r != BZ_STREAM_END);

      ret = gst_pad_event_default (pad, parent, e);

      if (r != BZ_STREAM_END || flow != GST_FLOW_OK)
        ret = FALSE;

      /* reset the compressor so a new stream can start cleanly */
      gst_bz2enc_compress_init (b);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, parent, e);
      break;
  }

  return ret;
}
/* Chain function: encode one raw video buffer with the mimic encoder.
 *
 * Allocates an output buffer with room for the TCP framing header, encodes
 * the frame (keyframe every MAX_INTERFRAMES frames), writes the TCP header
 * in front of the encoded data, trims the buffer to the encoded size and
 * pushes it on the source pad.  Takes ownership of @in (always unreffed).
 *
 * Returns the flow result of the downstream push, or GST_FLOW_ERROR if the
 * encoder failed.
 */
static GstFlowReturn
gst_mim_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * in)
{
  GstMimEnc *mimenc = GST_MIM_ENC (parent);
  GstBuffer *out = NULL;
  GstMapInfo in_map;
  GstMapInfo out_map;
  GstFlowReturn res = GST_FLOW_OK;
  gboolean keyframe;
  gint buffer_size;

  GST_OBJECT_LOCK (mimenc);

  gst_buffer_map (in, &in_map, GST_MAP_READ);

  out = gst_buffer_new_and_alloc (mimenc->buffer_size + TCP_HEADER_SIZE);
  gst_buffer_map (out, &out_map, GST_MAP_READWRITE);
  GST_BUFFER_TIMESTAMP (out) =
      gst_segment_to_running_time (&mimenc->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (in));
  mimenc->last_buffer = GST_BUFFER_TIMESTAMP (out);
  buffer_size = mimenc->buffer_size;
  /* a keyframe is forced every MAX_INTERFRAMES frames */
  keyframe = (mimenc->frames % MAX_INTERFRAMES) == 0 ? TRUE : FALSE;
  /* encode past the TCP header; mimic updates buffer_size to the
   * actual encoded payload size */
  if (!mimic_encode_frame (mimenc->enc, in_map.data,
          out_map.data + TCP_HEADER_SIZE, &buffer_size, keyframe)) {
    gst_buffer_unmap (in, &in_map);
    gst_buffer_unmap (out, &out_map);
    gst_buffer_unref (out);
    GST_ELEMENT_ERROR (mimenc, STREAM, ENCODE, (NULL),
        ("mimic_encode_frame error"));
    res = GST_FLOW_ERROR;
    goto out_unlock;
  }
  gst_buffer_unmap (in, &in_map);

  if (!keyframe)
    GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

  /* FIX: gst_buffer_get_size() returns gsize; passing it to "%d" is a
   * printf format/argument mismatch (UB on LP64) — use G_GSIZE_FORMAT */
  GST_LOG_OBJECT (mimenc,
      "incoming buf size %" G_GSIZE_FORMAT ", encoded size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (in), gst_buffer_get_size (out));
  ++mimenc->frames;

  /* now let's create that tcp header */
  gst_mim_enc_create_tcp_header (mimenc, out_map.data, buffer_size,
      GST_BUFFER_TIMESTAMP (out), keyframe, FALSE);

  gst_buffer_unmap (out, &out_map);
  gst_buffer_resize (out, 0, buffer_size + TCP_HEADER_SIZE);

  GST_OBJECT_UNLOCK (mimenc);

  res = gst_pad_push (mimenc->srcpad, out);

out:
  gst_buffer_unref (in);

  return res;

out_unlock:
  GST_OBJECT_UNLOCK (mimenc);
  goto out;
}
/* Payload one raw video frame into RTP packets per RFC 4175.
 *
 * For each field of the (possibly interlaced) frame, lines are packed into
 * MTU-sized RTP buffers in two passes: the first pass writes the 6-byte
 * per-segment headers (length, line number, offset, continuation bit), the
 * second pass re-reads those headers and copies/interleaves the matching
 * pixel data behind them.  Buffers are pushed either one-by-one or collected
 * into buffer lists (chunks_per_frame controls the list granularity).
 *
 * Takes ownership of @buffer (always unreffed).  Returns the downstream
 * flow result, or GST_FLOW_NOT_SUPPORTED on unknown sampling / too-small MTU.
 */
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list
   * (12 + 14 presumably accounts for RTP + payload header overhead —
   * NOTE(review): confirm against the allocation below) */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);

  use_buffer_lists = (rtpvrawpay->chunks_per_frame < (height / yinc));

  /* one pass for progressive, two (odd/even lines) for interlaced */
  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_buffer_new_allocate (left, 0, 0);

      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        /* second field is presented half a frame duration later */
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how may bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no; top bit is the field identifier */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to next line we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            /* packed formats: one contiguous memcpy per segment */
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            /* reorder AYUV samples to Y, U, V (alpha dropped) */
            datap = p0 + (lin * ystride) + (offs * 4);

            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            /* interleave two luma lines with the shared chroma line */
            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);

      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      /* Now either push out the buffer directly */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* or add the buffer to buffer list ... */
      gst_buffer_list_add (list, out);

      /* .. and check if we need to push out the list */
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }
  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT, (NULL),
        ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT, (NULL),
        ("not enough space to send at least one pixel"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
/* Dequeue one filled buffer from the V4L2 device.
 *
 * Polls the device (capture only), issues VIDIOC_DQBUF, looks up the
 * matching GstBuffer in the pool by v4l2 buffer index, marks it as
 * outstanding (pool slot set to NULL, num_queued decremented), copies the
 * driver timestamp/interlace flags onto it and resizes it to the number of
 * bytes actually captured.
 *
 * On success stores the buffer in @buffer and returns GST_FLOW_OK;
 * returns the poll result on poll failure, GST_FLOW_ERROR otherwise.
 */
static GstFlowReturn
gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
{
  GstFlowReturn res;
  GstBuffer *outbuf;
  struct v4l2_buffer vbuffer;
  GstV4l2Object *obj = pool->obj;
  GstClockTime timestamp;

  if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
    /* select works for input devices when data is available. According to the
     * specs we can also poll to find out when a frame has been displayed but
     * that just seems to lock up here */
    if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
      goto poll_error;
  }

  memset (&vbuffer, 0x00, sizeof (vbuffer));
  vbuffer.type = obj->type;
  vbuffer.memory = V4L2_MEMORY_MMAP;

  GST_LOG_OBJECT (pool, "doing DQBUF");
  if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &vbuffer) < 0)
    goto error;

  /* get our GstBuffer with that index from the pool, if the buffer was
   * outstanding we have a serious problem. */
  outbuf = pool->buffers[vbuffer.index];
  if (outbuf == NULL)
    goto no_buffer;

  /* mark the buffer outstanding */
  pool->buffers[vbuffer.index] = NULL;
  pool->num_queued--;

  timestamp = GST_TIMEVAL_TO_TIME (vbuffer.timestamp);

  GST_LOG_OBJECT (pool,
      "dequeued buffer %p seq:%d (ix=%d), used %d, flags %08x, ts %"
      GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
      vbuffer.index, vbuffer.bytesused, vbuffer.flags,
      GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);

  /* set top/bottom field first if v4l2_buffer has the information */
  if (vbuffer.field == V4L2_FIELD_INTERLACED_TB) {
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
  }
  if (vbuffer.field == V4L2_FIELD_INTERLACED_BT) {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* this can change at every frame, esp. with jpeg
   * (capture: use the bytes actually produced; output: full length) */
  if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
    gst_buffer_resize (outbuf, 0, vbuffer.bytesused);
  else
    gst_buffer_resize (outbuf, 0, vbuffer.length);

  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;

  *buffer = outbuf;

  return GST_FLOW_OK;

  /* ERRORS */
poll_error:
  {
    GST_DEBUG_OBJECT (pool, "poll error %s", gst_flow_get_name (res));
    return res;
  }
error:
  {
    GST_WARNING_OBJECT (pool,
        "problem dequeuing frame %d (ix=%d), pool-ct=%d, buf.flags=%d",
        vbuffer.sequence, vbuffer.index, GST_MINI_OBJECT_REFCOUNT (pool),
        vbuffer.flags);

    switch (errno) {
      case EAGAIN:
        GST_WARNING_OBJECT (pool,
            "Non-blocking I/O has been selected using O_NONBLOCK and"
            " no buffer was in the outgoing queue. device %s", obj->videodev);
        break;
      case EINVAL:
        GST_ERROR_OBJECT (pool,
            "The buffer type is not supported, or the index is out of bounds, "
            "or no buffers have been allocated yet, or the userptr "
            "or length are invalid. device %s", obj->videodev);
        break;
      case ENOMEM:
        GST_ERROR_OBJECT (pool,
            "insufficient memory to enqueue a user pointer buffer");
        break;
      case EIO:
        GST_INFO_OBJECT (pool,
            "VIDIOC_DQBUF failed due to an internal error."
            " Can also indicate temporary problems like signal loss."
            " Note the driver might dequeue an (empty) buffer despite"
            " returning an error, or even stop capturing."
            " device %s", obj->videodev);
        /* have we de-queued a buffer ? */
        if (!(vbuffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
          GST_DEBUG_OBJECT (pool, "reenqueing buffer");
          /* FIXME ... should we do something here? */
        }
        break;
      case EINTR:
        GST_WARNING_OBJECT (pool, "could not sync on a buffer on device %s",
            obj->videodev);
        break;
      default:
        GST_WARNING_OBJECT (pool,
            "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
            obj->videodev, errno, g_strerror (errno));
        break;
    }
    return GST_FLOW_ERROR;
  }
no_buffer:
  {
    GST_ERROR_OBJECT (pool, "No free buffer found in the pool at index %d.",
        vbuffer.index);
    return GST_FLOW_ERROR;
  }
}
/* Depayload one RTP/JPEG (RFC 2435) packet.
 *
 * Parses the JPEG payload header (fragment offset, type, Q, dimensions),
 * the optional restart-marker header (type > 63) and the optional
 * quantization table header (Q >= 128, first fragment).  For the first
 * fragment a JPEG header is synthesised with MakeHeaders() and pushed into
 * the adapter, followed by the scan data of every fragment.  When the RTP
 * marker bit is set, the accumulated frame is taken from the adapter
 * (appending an EOI marker if missing) and returned; otherwise NULL is
 * returned while the frame is still being assembled.
 */
static GstBuffer *
gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
{
  GstRtpJPEGDepay *rtpjpegdepay;
  GstBuffer *outbuf;
  gint payload_len, header_len;
  guint8 *payload;
  guint frag_offset;
  gint Q;
  guint type, width, height;
  guint16 dri, precision, length;
  guint8 *qtable;

  rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);

  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
    GST_DEBUG_OBJECT (depayload, "DISCONT, reset adapter");
    gst_adapter_clear (rtpjpegdepay->adapter);
    rtpjpegdepay->discont = TRUE;
  }

  payload_len = gst_rtp_buffer_get_payload_len (rtp);

  if (payload_len < 8)
    goto empty_packet;

  payload = gst_rtp_buffer_get_payload (rtp);
  header_len = 0;

  /*  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * | Type-specific |              Fragment Offset                  |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |      Type     |       Q       |     Width     |     Height    |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   */
  frag_offset = (payload[1] << 16) | (payload[2] << 8) | payload[3];
  type = payload[4];
  Q = payload[5];
  /* width/height are transmitted in units of 8 pixels */
  width = payload[6] * 8;
  height = payload[7] * 8;

  /* saw a packet with fragment offset > 0 and we don't already have data queued
   * up (most importantly, we don't have a header for this data) -- drop it
   * XXX: maybe we can check if the jpeg is progressive and salvage the data?
   * XXX: not implemented yet because jpegenc can't create progressive jpegs */
  if (frag_offset > 0 && gst_adapter_available (rtpjpegdepay->adapter) == 0)
    goto no_header_packet;

  /* allow frame dimensions > 2040, passed in SDP session or media attributes
   * from gstrtspsrc.c (gst_rtspsrc_sdp_attributes_to_caps), or in caps */
  if (!width)
    width = rtpjpegdepay->media_width;

  if (!height)
    height = rtpjpegdepay->media_height;

  if (width == 0 || height == 0)
    goto invalid_dimension;

  GST_DEBUG_OBJECT (rtpjpegdepay, "frag %u, type %u, Q %d, width %u, height %u",
      frag_offset, type, Q, width, height);

  header_len += 8;
  payload += 8;
  payload_len -= 8;

  dri = 0;
  if (type > 63) {
    if (payload_len < 4)
      goto empty_packet;

    /*  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |       Restart Interval        |F|L|       Restart Count       |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    dri = (payload[0] << 8) | payload[1];

    GST_DEBUG_OBJECT (rtpjpegdepay, "DRI %" G_GUINT16_FORMAT, dri);

    payload += 4;
    header_len += 4;
    payload_len -= 4;
  }

  if (Q >= 128 && frag_offset == 0) {
    if (payload_len < 4)
      goto empty_packet;

    /*  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |      MBZ      |   Precision   |             Length            |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                    Quantization Table Data                    |
     * |                              ...                              |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    precision = payload[1];
    length = (payload[2] << 8) | payload[3];

    GST_DEBUG_OBJECT (rtpjpegdepay, "precision %04x, length %" G_GUINT16_FORMAT,
        precision, length);

    if (Q == 255 && length == 0)
      goto empty_packet;

    payload += 4;
    header_len += 4;
    payload_len -= 4;

    if (length > payload_len)
      goto empty_packet;

    if (length > 0)
      qtable = payload;
    else
      qtable = rtpjpegdepay->qtables[Q];

    payload += length;
    header_len += length;
    payload_len -= length;
  } else {
    length = 0;
    qtable = NULL;
    precision = 0;
  }

  if (frag_offset == 0) {
    GstMapInfo map;
    guint size;

    if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
      GstCaps *outcaps;

      outcaps =
          gst_caps_new_simple ("image/jpeg", "framerate", GST_TYPE_FRACTION,
          rtpjpegdepay->frate_num, rtpjpegdepay->frate_denom, "width",
          G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
      gst_pad_set_caps (depayload->srcpad, outcaps);
      gst_caps_unref (outcaps);

      rtpjpegdepay->width = width;
      rtpjpegdepay->height = height;
    }

    GST_LOG_OBJECT (rtpjpegdepay, "first packet, length %" G_GUINT16_FORMAT,
        length);

    /* first packet */
    if (length == 0) {
      if (Q < 128) {
        /* no quant table, see if we have one cached */
        qtable = rtpjpegdepay->qtables[Q];
        if (!qtable) {
          GST_DEBUG_OBJECT (rtpjpegdepay, "making Q %d table", Q);
          /* make and cache the table */
          qtable = g_new (guint8, 128);
          MakeTables (rtpjpegdepay, Q, qtable);
          rtpjpegdepay->qtables[Q] = qtable;
        } else {
          GST_DEBUG_OBJECT (rtpjpegdepay, "using cached table for Q %d", Q);
        }
        /* all 8 bit quantizers */
        precision = 0;
      } else {
        if (!qtable)
          goto no_qtable;
      }
    }

    /* I think we can get here with a NULL qtable, so make sure we don't
       go dereferencing it in MakeHeaders if we do */
    if (!qtable)
      goto no_qtable;

    /* max header length, should be big enough */
    outbuf = gst_buffer_new_and_alloc (1000);
    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
    size = MakeHeaders (map.data, type, width, height, qtable, precision, dri);
    gst_buffer_unmap (outbuf, &map);
    gst_buffer_resize (outbuf, 0, size);

    GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);

    gst_adapter_push (rtpjpegdepay->adapter, outbuf);
  }

  /* take JPEG data, push in the adapter */
  GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
  outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, -1);
  gst_adapter_push (rtpjpegdepay->adapter, outbuf);
  outbuf = NULL;

  if (gst_rtp_buffer_get_marker (rtp)) {
    guint avail;
    guint8 end[2];
    GstMapInfo map;

    /* last buffer take all data out of the adapter */
    avail = gst_adapter_available (rtpjpegdepay->adapter);
    GST_DEBUG_OBJECT (rtpjpegdepay, "marker set, last buffer");

    if (avail < 2)
      goto invalid_packet;

    /* take the last bytes of the jpeg data to see if there is an EOI
     * marker */
    gst_adapter_copy (rtpjpegdepay->adapter, end, avail - 2, 2);

    /* FIX: this used '&&', which only appended an EOI when *both* trailing
     * bytes differed from 0xff/0xd9 — a tail like ff 00 or d8 d9 was then
     * wrongly accepted as a valid EOI.  The frame lacks an EOI unless the
     * last two bytes are exactly 0xff 0xd9, hence '||'. */
    if (end[0] != 0xff || end[1] != 0xd9) {
      GST_DEBUG_OBJECT (rtpjpegdepay, "no EOI marker, adding one");

      /* no EOI marker, add one */
      outbuf = gst_buffer_new_and_alloc (2);
      gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
      map.data[0] = 0xff;
      map.data[1] = 0xd9;
      gst_buffer_unmap (outbuf, &map);

      gst_adapter_push (rtpjpegdepay->adapter, outbuf);
      avail += 2;
    }
    outbuf = gst_adapter_take_buffer (rtpjpegdepay->adapter, avail);

    if (rtpjpegdepay->discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      rtpjpegdepay->discont = FALSE;
    }

    gst_rtp_drop_meta (GST_ELEMENT_CAST (rtpjpegdepay), outbuf,
        g_quark_from_static_string (GST_META_TAG_VIDEO_STR));

    GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
  }

  return outbuf;

  /* ERRORS */
empty_packet:
  {
    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
        ("Empty Payload."), (NULL));
    return NULL;
  }
invalid_dimension:
  {
    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
        ("Invalid Dimension %dx%d.", width, height), (NULL));
    return NULL;
  }
no_qtable:
  {
    GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
    return NULL;
  }
invalid_packet:
  {
    GST_WARNING_OBJECT (rtpjpegdepay, "invalid packet");
    gst_adapter_flush (rtpjpegdepay->adapter,
        gst_adapter_available (rtpjpegdepay->adapter));
    return NULL;
  }
no_header_packet:
  {
    GST_WARNING_OBJECT (rtpjpegdepay,
        "discarding data packets received when we have no header");
    return NULL;
  }
}
/* Locate and output one interleaved-RTSP frame.
 *
 * Scans the mapped input for the 0x24 ('$') interleave marker carrying the
 * configured channel id.  Bytes before a sync point are skipped via
 * @skipsize; once a frame header sits at offset 0, the 16-bit big-endian
 * size field (+4 header bytes) gives the frame size.  Caps are set on the
 * source pad on first output.  The 4-byte interleave header is stripped
 * from the finished frame.
 */
static GstFlowReturn
gst_irtsp_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstIRTSPParse *IRTSPParse = GST_IRTSP_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader reader;
  GstMapInfo map;
  gint sync_pos;
  guint framesize = 0;
  gboolean have_frame = FALSE;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* not even room for the 4-byte interleave header yet */
  if (G_UNLIKELY (map.size < 4))
    goto done;

  gst_byte_reader_init (&reader, map.data, map.size);
  sync_pos = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000,
      0x24000000 + (IRTSPParse->channel_id << 16), 0, map.size);
  GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", sync_pos);

  if (sync_pos < 0) {
    /* nothing resembling a sync word; drop all but the last 3 bytes */
    *skipsize = map.size - 3;
    goto done;
  }

  if (sync_pos > 0) {
    /* sync word found, but not at the start; skip the leading bytes */
    *skipsize = sync_pos;
    goto done;
  }

  framesize = GST_READ_UINT16_BE (map.data + 2) + 4;
  GST_LOG_OBJECT (parse, "got frame size %d", framesize);
  have_frame = TRUE;

  if (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (parse))) {
    GstCaps *caps = gst_caps_new_empty_simple ("application/x-rtp");

    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);
  }

done:
  gst_buffer_unmap (buf, &map);

  if (have_frame && framesize <= map.size) {
    /* HACK HACK skip header.
     * could also ask baseparse to skip this,
     * but that would give us a discontinuity for free
     * which is a bit too much to have on all our packets */
    frame->out_buffer = gst_buffer_copy (frame->buffer);
    gst_buffer_resize (frame->out_buffer, 4, -1);
    GST_BUFFER_FLAG_UNSET (frame->out_buffer, GST_BUFFER_FLAG_DISCONT);

    return gst_base_parse_finish_frame (parse, frame, framesize);
  }

  return GST_FLOW_OK;
}
/* Transform one audio buffer through the FIR filter.
 *
 * Detects discontinuities (DISCONT flag, or a timestamp more than 5 ms away
 * from the one expected from the sample counters) and flushes/pushes the
 * filter residue in that case.  Runs the element's process() vfunc on the
 * mapped samples, then trims latency zeros from the front of the output and
 * recomputes timestamp/duration/offset metadata from the sample counters.
 *
 * Returns GST_FLOW_OK, GST_BASE_TRANSFORM_FLOW_DROPPED while the output is
 * still entirely within the latency period, or GST_FLOW_ERROR on invalid
 * timestamps.
 */
static GstFlowReturn
gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
  GstClockTime timestamp, expected_timestamp;
  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
  gint rate = GST_AUDIO_FILTER_RATE (self);
  gint bps = GST_AUDIO_FILTER_BPS (self);
  GstMapInfo inmap, outmap;
  guint input_samples;
  guint output_samples;
  guint generated_samples;
  guint64 output_offset;
  gint64 diff = 0;
  GstClockTime stream_time;

  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    GST_ERROR_OBJECT (self, "Invalid timestamp");
    return GST_FLOW_ERROR;
  }

  g_mutex_lock (&self->lock);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  /* sync controlled properties to the stream time */
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (self), stream_time);

  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);

  /* timestamp this buffer should carry if the stream were contiguous */
  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
    expected_timestamp =
        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
        GST_SECOND, rate);
  else
    expected_timestamp = GST_CLOCK_TIME_NONE;

  /* Reset the residue if already existing on discont buffers */
  if (GST_BUFFER_IS_DISCONT (inbuf)
      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
          && (ABS (GST_CLOCK_DIFF (timestamp,
                      expected_timestamp) > 5 * GST_MSECOND)))) {
    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
      gst_audio_fx_base_fir_filter_push_residue (self);
    self->buffer_fill = 0;
    g_free (self->buffer);
    self->buffer = NULL;
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
    self->nsamples_out = 0;
    self->nsamples_in = 0;
  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
    self->start_ts = timestamp;
    self->start_off = GST_BUFFER_OFFSET (inbuf);
  }

  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);

  input_samples = (inmap.size / bps) / channels;
  output_samples = (outmap.size / bps) / channels;

  self->nsamples_in += input_samples;

  /* run the subclass filter implementation */
  generated_samples =
      self->process (self, inmap.data, outmap.data, input_samples);

  gst_buffer_unmap (inbuf, &inmap);
  gst_buffer_unmap (outbuf, &outmap);

  g_assert (generated_samples <= output_samples);
  self->nsamples_out += generated_samples;
  if (generated_samples == 0)
    goto no_samples;

  /* Calculate the number of samples we can push out now without outputting
   * latency zeros in the beginning */
  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
  if (diff < 0)
    goto no_samples;

  if (diff < generated_samples) {
    gint64 tmp = diff;

    /* 'diff' becomes the number of leading latency samples to strip,
     * 'generated_samples' the number of samples actually pushed */
    diff = generated_samples - diff;
    generated_samples = tmp;
  } else {
    diff = 0;
  }

  /* strip latency zeros from the front, trim to generated samples */
  gst_buffer_resize (outbuf, diff * bps * channels,
      generated_samples * bps * channels);

  output_offset = self->nsamples_out - self->latency - generated_samples;
  GST_BUFFER_TIMESTAMP (outbuf) =
      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
      rate);
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
    GST_BUFFER_OFFSET_END (outbuf) =
        GST_BUFFER_OFFSET (outbuf) + generated_samples;
  } else {
    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
  }
  g_mutex_unlock (&self->lock);

  GST_DEBUG_OBJECT (self,
      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT
      ", nsamples_out: %d", gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
      GST_BUFFER_OFFSET_END (outbuf), generated_samples);

  return GST_FLOW_OK;

no_samples:
  {
    g_mutex_unlock (&self->lock);
    return GST_BASE_TRANSFORM_FLOW_DROPPED;
  }
}
/* Aggregate one output block of audio from all sink pads.
 *
 * Called by the GstAggregator base class whenever it wants output produced,
 * either because all pads have data or because @timeout fired (live mode).
 *
 * Overall strategy:
 *
 * Get all pads that have data for us and store them in a
 * new list.
 *
 * Calculate the current output offset/timestamp and
 * offset_end/timestamp_end. Allocate a silence buffer
 * for this and store it.
 *
 * For all pads:
 * 1) Once per input buffer (cached)
 *   1) Check discont (flag and timestamp with tolerance)
 *   2) If discont or new, resync. That means:
 *     1) Drop all start data of the buffer that comes before
 *        the current position/offset.
 *     2) Calculate the offset (output segment!) that the first
 *        frame of the input buffer corresponds to. Base this on
 *        the running time.
 *
 * 2) If the current pad's offset/offset_end overlaps with the output
 *    offset/offset_end, mix it at the appropriate position in the output
 *    buffer and advance the pad's position. Remember if this pad needs
 *    a new buffer to advance behind the output offset_end.
 *
 * 3) If we had no pad with a buffer, go EOS.
 *
 * 4) If we had at least one pad that did not advance behind output
 *    offset_end, let collected be called again for the current
 *    output offset/offset_end.
 *
 * Returns: GST_FLOW_OK on success (including "wait for more data" cases),
 * GST_FLOW_EOS when all pads are EOS and nothing is left to mix,
 * GST_FLOW_NOT_NEGOTIATED when data arrives before caps, or whatever
 * gst_aggregator_finish_buffer() returns for the pushed buffer.
 */
static GstFlowReturn
gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
{
  GstElement *element;
  GstAudioAggregator *aagg;
  GList *iter;
  GstFlowReturn ret;
  GstBuffer *outbuf = NULL;
  gint64 next_offset;            /* sample offset where the NEXT block starts */
  gint64 next_timestamp;         /* stream time corresponding to next_offset */
  gint rate, bpf;                /* sample rate and bytes per frame */
  gboolean dropped = FALSE;      /* TRUE if any pad dropped its buffer this cycle */
  gboolean is_eos = TRUE;        /* stays TRUE only if every sink pad is EOS */
  gboolean is_done = TRUE;       /* stays TRUE only if every pad reached next_offset */
  guint blocksize;               /* output block length in samples */

  element = GST_ELEMENT (agg);
  aagg = GST_AUDIO_AGGREGATOR (agg);

  /* Sync pad properties to the stream time */
  gst_aggregator_iterate_sinkpads (agg,
      (GstAggregatorPadForeachFunc) GST_DEBUG_FUNCPTR (sync_pad_values), NULL);

  /* Lock ordering: aggregator-level lock first, then the object lock.
   * Both are held (with temporary releases, noted below) for most of
   * this function. */
  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);

  /* Update position from the segment start/stop if needed */
  if (agg->segment.position == -1) {
    if (agg->segment.rate > 0.0)
      agg->segment.position = agg->segment.start;
    else
      agg->segment.position = agg->segment.stop;
  }

  /* No caps negotiated yet: on timeout just advance the position silently;
   * otherwise it's a hard error (data arrived before caps). */
  if (G_UNLIKELY (aagg->info.finfo->format == GST_AUDIO_FORMAT_UNKNOWN)) {
    if (timeout) {
      GST_DEBUG_OBJECT (aagg,
          "Got timeout before receiving any caps, don't output anything");

      /* Advance position (clamped at 0 when playing backwards) */
      if (agg->segment.rate > 0.0)
        agg->segment.position += aagg->priv->output_buffer_duration;
      else if (agg->segment.position > aagg->priv->output_buffer_duration)
        agg->segment.position -= aagg->priv->output_buffer_duration;
      else
        agg->segment.position = 0;

      GST_OBJECT_UNLOCK (agg);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_OK;
    } else {
      GST_OBJECT_UNLOCK (agg);
      goto not_negotiated;
    }
  }

  /* Pending caps must be pushed without the object lock held; retake it
   * afterwards before touching shared state again. */
  if (aagg->priv->send_caps) {
    GST_OBJECT_UNLOCK (agg);
    gst_aggregator_set_src_caps (agg, aagg->current_caps);
    GST_OBJECT_LOCK (agg);

    aagg->priv->send_caps = FALSE;
  }

  rate = GST_AUDIO_INFO_RATE (&aagg->info);
  bpf = GST_AUDIO_INFO_BPF (&aagg->info);

  /* First cycle: derive the starting sample offset from the segment
   * position so offsets and timestamps stay consistent. */
  if (aagg->priv->offset == -1) {
    aagg->priv->offset =
        gst_util_uint64_scale (agg->segment.position - agg->segment.start,
        rate, GST_SECOND);
    GST_DEBUG_OBJECT (aagg, "Starting at offset %" G_GINT64_FORMAT,
        aagg->priv->offset);
  }

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      rate, GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* for the next timestamp, use the sample counter, which will
   * never accumulate rounding errors */

  /* FIXME: Reverse mixing does not work at all yet */
  if (agg->segment.rate > 0.0) {
    next_offset = aagg->priv->offset + blocksize;
  } else {
    next_offset = aagg->priv->offset - blocksize;
  }

  next_timestamp =
      agg->segment.start + gst_util_uint64_scale (next_offset, GST_SECOND,
      rate);

  /* Lazily create the output buffer for this block; it persists across
   * aggregate() calls until the block is complete and pushed. */
  if (aagg->priv->current_buffer == NULL) {
    /* create_output_buffer is a subclass vmethod and may block/allocate,
     * so drop the object lock around it. */
    GST_OBJECT_UNLOCK (agg);
    aagg->priv->current_buffer =
        GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->create_output_buffer (aagg,
        blocksize);
    /* Be careful, some things could have changed ? */
    GST_OBJECT_LOCK (agg);
    /* Starts as silence; mixing clears the GAP flag when real data lands. */
    GST_BUFFER_FLAG_SET (aagg->priv->current_buffer, GST_BUFFER_FLAG_GAP);
  }
  outbuf = aagg->priv->current_buffer;

  GST_LOG_OBJECT (agg,
      "Starting to mix %u samples for offset %" G_GINT64_FORMAT
      " with timestamp %" GST_TIME_FORMAT, blocksize,
      aagg->priv->offset, GST_TIME_ARGS (agg->segment.position));

  /* Walk every sink pad and try to mix its data into outbuf. */
  for (iter = element->sinkpads; iter; iter = iter->next) {
    GstBuffer *inbuf;
    GstAudioAggregatorPad *pad = (GstAudioAggregatorPad *) iter->data;
    GstAggregatorPad *aggpad = (GstAggregatorPad *) iter->data;
    gboolean drop_buf = FALSE;
    gboolean pad_eos = gst_aggregator_pad_is_eos (aggpad);

    if (!pad_eos)
      is_eos = FALSE;

    inbuf = gst_aggregator_pad_get_buffer (aggpad);

    GST_OBJECT_LOCK (pad);
    if (!inbuf) {
      /* No queued buffer: on timeout we output with whatever we have;
       * otherwise a non-EOS pad means we must wait for more data. */
      if (timeout) {
        if (pad->priv->output_offset < next_offset) {
          gint64 diff = next_offset - pad->priv->output_offset;
          GST_LOG_OBJECT (pad, "Timeout, missing %" G_GINT64_FORMAT
              " frames (%" GST_TIME_FORMAT ")", diff,
              GST_TIME_ARGS (gst_util_uint64_scale (diff, GST_SECOND,
                      GST_AUDIO_INFO_RATE (&aagg->info))));
        }
      } else if (!pad_eos) {
        is_done = FALSE;
      }
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    /* A cached per-pad buffer, if any, must be the same one the base
     * class just handed us. */
    g_assert (!pad->priv->buffer || pad->priv->buffer == inbuf);

    /* New buffer? */
    if (!pad->priv->buffer) {
      /* Takes ownership of buffer */
      if (!gst_audio_aggregator_fill_buffer (aagg, pad, inbuf)) {
        /* Buffer was entirely before our position (too late) — drop and
         * restart the whole cycle. Pad lock must be released before
         * gst_aggregator_pad_drop_buffer(). */
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    } else {
      /* Already cached; release the extra ref from get_buffer(). */
      gst_buffer_unref (inbuf);
    }

    if (!pad->priv->buffer && !dropped && pad_eos) {
      GST_DEBUG_OBJECT (aggpad, "Pad is in EOS state");
      GST_OBJECT_UNLOCK (pad);
      continue;
    }

    g_assert (pad->priv->buffer);

    /* This pad is lagging behind, we need to update the offset
     * and maybe drop the current buffer */
    if (pad->priv->output_offset < aagg->priv->offset) {
      gint64 diff = aagg->priv->offset - pad->priv->output_offset;
      gint64 odiff = diff;

      /* Skip at most what remains of the pad's current buffer. */
      if (pad->priv->position + diff > pad->priv->size)
        diff = pad->priv->size - pad->priv->position;
      pad->priv->position += diff;
      pad->priv->output_offset += diff;

      if (pad->priv->position == pad->priv->size) {
        GST_LOG_OBJECT (pad, "Buffer was late by %" GST_TIME_FORMAT
            ", dropping %" GST_PTR_FORMAT,
            GST_TIME_ARGS (gst_util_uint64_scale (odiff, GST_SECOND,
                    GST_AUDIO_INFO_RATE (&aagg->info))), pad->priv->buffer);
        /* Buffer done, drop it */
        gst_buffer_replace (&pad->priv->buffer, NULL);
        dropped = TRUE;
        GST_OBJECT_UNLOCK (pad);
        gst_aggregator_pad_drop_buffer (aggpad);
        continue;
      }
    }

    /* Pad data overlaps the current output block: mix it in. */
    if (pad->priv->output_offset >= aagg->priv->offset
        && pad->priv->output_offset < aagg->priv->offset + blocksize
        && pad->priv->buffer) {
      GST_LOG_OBJECT (aggpad, "Mixing buffer for current offset");
      /* mix_buffer() returns FALSE when the pad's buffer is exhausted,
       * in which case it must be dropped (after the pad lock is gone). */
      drop_buf = !gst_audio_aggregator_mix_buffer (aagg, pad,
          pad->priv->buffer, outbuf);
      if (pad->priv->output_offset >= next_offset) {
        GST_DEBUG_OBJECT (pad,
            "Pad is after current offset: %" G_GUINT64_FORMAT " >= %"
            G_GINT64_FORMAT, pad->priv->output_offset, next_offset);
      } else {
        is_done = FALSE;
      }
    }

    GST_OBJECT_UNLOCK (pad);

    if (drop_buf)
      gst_aggregator_pad_drop_buffer (aggpad);
  }
  GST_OBJECT_UNLOCK (agg);

  if (dropped) {
    /* We dropped a buffer, retry */
    GST_INFO_OBJECT (aagg, "A pad dropped a buffer, wait for the next one");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  if (!is_done && !is_eos) {
    /* Get more buffers */
    GST_INFO_OBJECT (aagg,
        "We're not done yet for the current offset," " waiting for more data");
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    return GST_FLOW_OK;
  }

  if (is_eos) {
    gint64 max_offset = 0;

    GST_DEBUG_OBJECT (aagg, "We're EOS");

    /* Find how far any pad actually produced data. */
    GST_OBJECT_LOCK (agg);
    for (iter = GST_ELEMENT (agg)->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      max_offset = MAX ((gint64) max_offset, (gint64) pad->priv->output_offset);
    }
    GST_OBJECT_UNLOCK (agg);

    /* This means EOS or nothing mixed in at all */
    if (aagg->priv->offset == max_offset) {
      gst_buffer_replace (&aagg->priv->current_buffer, NULL);
      GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
      return GST_FLOW_EOS;
    }

    /* Partial final block: shrink outbuf to the samples actually mixed. */
    if (max_offset <= next_offset) {
      GST_DEBUG_OBJECT (aagg,
          "Last buffer is incomplete: %" G_GUINT64_FORMAT " <= %"
          G_GINT64_FORMAT, max_offset, next_offset);
      next_offset = max_offset;
      next_timestamp =
          agg->segment.start + gst_util_uint64_scale (next_offset, GST_SECOND,
          rate);

      if (next_offset > aagg->priv->offset)
        gst_buffer_resize (outbuf, 0, (next_offset - aagg->priv->offset) * bpf);
    }
  }

  /* set timestamps on the output buffer */
  GST_OBJECT_LOCK (agg);
  if (agg->segment.rate > 0.0) {
    GST_BUFFER_PTS (outbuf) = agg->segment.position;
    GST_BUFFER_OFFSET (outbuf) = aagg->priv->offset;
    GST_BUFFER_OFFSET_END (outbuf) = next_offset;
    GST_BUFFER_DURATION (outbuf) = next_timestamp - agg->segment.position;
  } else {
    /* Reverse playback: PTS/offsets are swapped relative to forward. */
    GST_BUFFER_PTS (outbuf) = next_timestamp;
    GST_BUFFER_OFFSET (outbuf) = next_offset;
    GST_BUFFER_OFFSET_END (outbuf) = aagg->priv->offset;
    GST_BUFFER_DURATION (outbuf) = agg->segment.position - next_timestamp;
  }
  GST_OBJECT_UNLOCK (agg);

  /* send it out */
  GST_LOG_OBJECT (aagg,
      "pushing outbuf %p, timestamp %" GST_TIME_FORMAT " offset %"
      G_GINT64_FORMAT, outbuf, GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
      GST_BUFFER_OFFSET (outbuf));

  /* finish_buffer() pushes downstream and may block, so release the
   * aggregator lock around it; ownership of current_buffer transfers. */
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  ret = gst_aggregator_finish_buffer (agg, aagg->priv->current_buffer);
  aagg->priv->current_buffer = NULL;

  GST_LOG_OBJECT (aagg, "pushed outbuf, result = %s", gst_flow_get_name (ret));

  GST_AUDIO_AGGREGATOR_LOCK (aagg);
  GST_OBJECT_LOCK (agg);
  aagg->priv->offset = next_offset;
  agg->segment.position = next_timestamp;

  /* If there was a timeout and there was a gap in data in one of the streams,
   * then it's a very good time for a resync with the timestamps. */
  if (timeout) {
    for (iter = element->sinkpads; iter; iter = iter->next) {
      GstAudioAggregatorPad *pad = GST_AUDIO_AGGREGATOR_PAD (iter->data);

      GST_OBJECT_LOCK (pad);
      /* Mark lagging pads for resync (-1 forces re-derivation from
       * running time on their next buffer). */
      if (pad->priv->output_offset < aagg->priv->offset)
        pad->priv->output_offset = -1;
      GST_OBJECT_UNLOCK (pad);
    }
  }
  GST_OBJECT_UNLOCK (agg);
  GST_AUDIO_AGGREGATOR_UNLOCK (aagg);

  return ret;
  /* ERRORS */
not_negotiated:
  {
    GST_AUDIO_AGGREGATOR_UNLOCK (aagg);
    GST_ELEMENT_ERROR (aagg, STREAM, FORMAT, (NULL),
        ("Unknown data received, not negotiated"));
    return GST_FLOW_NOT_NEGOTIATED;
  }
}