static GstFlowReturn gst_siren_dec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buf) { GstSirenDec *dec; GstFlowReturn ret = GST_FLOW_OK; GstBuffer *out_buf; guint8 *in_data, *out_data; guint i, size, num_frames; gint out_size, in_size; gint decode_ret; GstMapInfo inmap, outmap; dec = GST_SIREN_DEC (bdec); size = gst_buffer_get_size (buf); GST_LOG_OBJECT (dec, "Received buffer of size %u", size); g_return_val_if_fail (size % 40 == 0, GST_FLOW_ERROR); g_return_val_if_fail (size > 0, GST_FLOW_ERROR); /* process 40 input bytes into 640 output bytes */ num_frames = size / 40; /* this is the input/output size */ in_size = num_frames * 40; out_size = num_frames * 640; GST_LOG_OBJECT (dec, "we have %u frames, %d in, %d out", num_frames, in_size, out_size); out_buf = gst_audio_decoder_allocate_output_buffer (bdec, out_size); if (out_buf == NULL) goto alloc_failed; /* get the input data for all the frames */ gst_buffer_map (buf, &inmap, GST_MAP_READ); gst_buffer_map (out_buf, &outmap, GST_MAP_WRITE); in_data = inmap.data; out_data = outmap.data; for (i = 0; i < num_frames; i++) { GST_LOG_OBJECT (dec, "Decoding frame %u/%u", i, num_frames); /* decode 40 input bytes to 640 output bytes */ decode_ret = Siren7_DecodeFrame (dec->decoder, in_data, out_data); if (decode_ret != 0) goto decode_error; /* move to next frame */ out_data += 640; in_data += 40; } gst_buffer_unmap (buf, &inmap); gst_buffer_unmap (out_buf, &outmap); GST_LOG_OBJECT (dec, "Finished decoding"); /* might really be multiple frames, * but was treated as one for all purposes here */ ret = gst_audio_decoder_finish_frame (bdec, out_buf, 1); done: return ret; /* ERRORS */ alloc_failed: { GST_DEBUG_OBJECT (dec, "failed to allocate output buffer: %d (%s)", ret, gst_flow_get_name (ret)); goto done; } decode_error: { GST_AUDIO_DECODER_ERROR (bdec, 1, STREAM, DECODE, (NULL), ("Error decoding frame: %d", decode_ret), ret); if (ret == GST_FLOW_OK) gst_audio_decoder_finish_frame (bdec, NULL, 1); /* unmap both buffers before dropping the output buffer */ gst_buffer_unmap (buf, &inmap); gst_buffer_unmap (out_buf, &outmap); gst_buffer_unref (out_buf); goto done; } }
static GstFlowReturn opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer) { GstFlowReturn res = GST_FLOW_OK; gsize size; guint8 *data; GstBuffer *outbuf; gint16 *out_data; int n, err; int samples; unsigned int packet_size; GstBuffer *buf; GstMapInfo map, omap; if (dec->state == NULL) { /* If we did not get any headers, default to 2 channels */ if (dec->n_channels == 0) { GST_INFO_OBJECT (dec, "No header, assuming single stream"); dec->n_channels = 2; dec->sample_rate = 48000; /* default stereo mapping */ dec->channel_mapping_family = 0; dec->channel_mapping[0] = 0; dec->channel_mapping[1] = 1; dec->n_streams = 1; dec->n_stereo_streams = 1; gst_opus_dec_negotiate (dec, NULL); } GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz", dec->n_channels, dec->sample_rate); #ifndef GST_DISABLE_DEBUG gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug, "Mapping table", dec->n_channels, dec->channel_mapping); #endif GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams, dec->n_stereo_streams); dec->state = opus_multistream_decoder_create (dec->sample_rate, dec->n_channels, dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err); if (!dec->state || err != OPUS_OK) goto creation_failed; } if (buffer) { GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buffer)); } else { GST_DEBUG_OBJECT (dec, "Received missing buffer"); } /* if using in-band FEC, we introduce one extra frame's delay as we need to potentially wait for next buffer to decode a missing buffer */ if (dec->use_inband_fec && !dec->primed) { GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out"); gst_buffer_replace (&dec->last_buffer, buffer); dec->primed = TRUE; goto done; } /* That's the buffer we'll be sending to the opus decoder. */ buf = (dec->use_inband_fec && gst_buffer_get_size (dec->last_buffer) > 0) ? dec->last_buffer : buffer; if (buf && gst_buffer_get_size (buf) > 0) { gst_buffer_map (buf, &map, GST_MAP_READ); data = map.data; size = map.size; GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size); } else { /* concealment data, pass NULL as the bits parameters */ GST_DEBUG_OBJECT (dec, "Using NULL buffer"); data = NULL; size = 0; } /* use maximum size (120 ms) as the number of returned samples is not constant over the stream. 
*/ samples = 120 * dec->sample_rate / 1000; packet_size = samples * dec->n_channels * 2; outbuf = gst_buffer_new_and_alloc (packet_size); if (!outbuf) { goto buffer_failed; } gst_buffer_map (outbuf, &omap, GST_MAP_WRITE); out_data = (gint16 *) omap.data; if (dec->use_inband_fec) { if (dec->last_buffer) { /* normal delayed decode */ GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0); } else { /* FEC reconstruction decode */ GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 1); } } else { /* normal decode */ GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0); } gst_buffer_unmap (outbuf, &omap); if (buf) gst_buffer_unmap (buf, &map); if (n < 0) { GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL)); gst_buffer_unref (outbuf); return GST_FLOW_ERROR; } GST_DEBUG_OBJECT (dec, "decoded %d samples", n); gst_buffer_set_size (outbuf, n * 2 * dec->n_channels); /* Skip any samples that need skipping */ if (dec->pre_skip > 0) { guint scaled_pre_skip = dec->pre_skip * dec->sample_rate / 48000; guint skip = scaled_pre_skip > n ? n : scaled_pre_skip; guint scaled_skip = skip * 48000 / dec->sample_rate; gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1); dec->pre_skip -= scaled_skip; GST_INFO_OBJECT (dec, "Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip, scaled_skip, dec->pre_skip); } if (gst_buffer_get_size (outbuf) == 0) { gst_buffer_unref (outbuf); outbuf = NULL; } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) { gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16, dec->n_channels, dec->opus_pos, dec->info.position); } /* Apply gain */ /* Would be better off leaving this to a volume element, as this is a naive conversion that does too many int/float conversions. However, we don't have control over the pipeline... So make it optional if the user program wants to use a volume, but do it by default so the correct volume goes out by default */ if (dec->apply_gain && outbuf && dec->r128_gain) { gsize rsize; unsigned int i, nsamples; double volume = dec->r128_gain_volume; gint16 *samples; gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE); samples = (gint16 *) omap.data; rsize = omap.size; GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume); nsamples = rsize / 2; for (i = 0; i < nsamples; ++i) { int sample = (int) (samples[i] * volume + 0.5); samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample; } gst_buffer_unmap (outbuf, &omap); } if (dec->use_inband_fec) { gst_buffer_replace (&dec->last_buffer, buffer); } res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1); if (res != GST_FLOW_OK) GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res)); done: return res; creation_failed: GST_ERROR_OBJECT (dec, "Failed to create Opus decoder: %d", err); return GST_FLOW_ERROR; buffer_failed: GST_ERROR_OBJECT (dec, "Failed to create %u byte buffer", packet_size); return GST_FLOW_ERROR; }
static gboolean gst_bz2enc_event (GstPad * pad, GstObject * parent, GstEvent * e) { GstBz2enc *b; gboolean ret; b = GST_BZ2ENC (parent); switch (GST_EVENT_TYPE (e)) { case GST_EVENT_EOS:{ GstFlowReturn flow = GST_FLOW_OK; /* initialised in case we break out before pushing anything */ int r = BZ_FINISH_OK; do { GstBuffer *out; GstMapInfo omap; guint n; out = gst_buffer_new_and_alloc (b->buffer_size); gst_buffer_map (out, &omap, GST_MAP_WRITE); b->stream.next_out = (char *) omap.data; b->stream.avail_out = omap.size; r = BZ2_bzCompress (&b->stream, BZ_FINISH); gst_buffer_unmap (out, &omap); if ((r != BZ_FINISH_OK) && (r != BZ_STREAM_END)) { GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL), ("Failed to finish compression (error code %i).", r)); gst_buffer_unref (out); break; } n = gst_buffer_get_size (out); if (b->stream.avail_out >= n) { gst_buffer_unref (out); break; } gst_buffer_resize (out, 0, n - b->stream.avail_out); n = gst_buffer_get_size (out); GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n; flow = gst_pad_push (b->src, out); if (flow != GST_FLOW_OK) { GST_DEBUG_OBJECT (b, "push on EOS failed: %s", gst_flow_get_name (flow)); break; } } while (r != BZ_STREAM_END); ret = gst_pad_event_default (pad, parent, e); if (r != BZ_STREAM_END || flow != GST_FLOW_OK) ret = FALSE; gst_bz2enc_compress_init (b); break; } default: ret = gst_pad_event_default (pad, parent, e); break; } return ret; }
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits * pages to output pad. */ static GstFlowReturn gst_ogg_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstOggParse *ogg; GstFlowReturn result = GST_FLOW_OK; gint ret = -1; guint32 serialno; GstBuffer *pagebuffer; GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer); ogg = GST_OGG_PARSE (parent); GST_LOG_OBJECT (ogg, "Chain function received buffer of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buffer)); gst_ogg_parse_submit_buffer (ogg, buffer); while (ret != 0 && result == GST_FLOW_OK) { ogg_page page; /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can * track how many bytes the ogg layer discarded (in the case of sync errors, * etc.); this allows us to accurately track the current stream offset */ ret = ogg_sync_pageseek (&ogg->sync, &page); if (ret == 0) { /* need more data, that's fine... */ break; } else if (ret < 0) { /* discontinuity; track how many bytes we skipped (-ret) */ ogg->offset -= ret; } else { gint64 granule = ogg_page_granulepos (&page); #ifndef GST_DISABLE_GST_DEBUG int bos = ogg_page_bos (&page); #endif guint64 startoffset = ogg->offset; GstOggStream *stream; gboolean keyframe; serialno = ogg_page_serialno (&page); stream = gst_ogg_parse_find_stream (ogg, serialno); GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT, GST_TIME_ARGS (buffertimestamp)); if (stream) { buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream, granule); if (ogg->video_stream) { if (stream == ogg->video_stream) { keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule); } else { keyframe = FALSE; } } else { keyframe = TRUE; } } else { buffertimestamp = GST_CLOCK_TIME_NONE; keyframe = TRUE; } pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset, buffertimestamp); /* We read out 'ret' bytes, so we set the next offset appropriately */ ogg->offset += ret; GST_LOG_OBJECT (ogg, "processing ogg page (serial %08x, pageno %ld, " "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d", serialno, ogg_page_pageno (&page), granule, bos, startoffset, ogg->offset, keyframe); if (ogg_page_bos (&page)) { /* If we've seen this serialno before, this is technically an error, * we log this case but accept it - this one replaces the previous * stream with this serialno. We can do this since we're streaming, and * not supporting seeking... */ GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno); if (stream != NULL) { GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %08x " "at offset %" G_GINT64_FORMAT, serialno, ogg->offset); } if (ogg->last_page_not_bos) { GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new " "chain starting with serial %u", serialno); gst_ogg_parse_delete_all_streams (ogg); } stream = gst_ogg_parse_new_stream (ogg, &page); ogg->last_page_not_bos = FALSE; gst_buffer_ref (pagebuffer); stream->headers = g_list_append (stream->headers, pagebuffer); if (!ogg->in_headers) { GST_LOG_OBJECT (ogg, "Found start of new chain at offset %" G_GUINT64_FORMAT, startoffset); ogg->in_headers = 1; } /* For now, we just keep the header buffer in the stream->headers list; * it actually gets output once we've collected the entire set */ } else { /* Non-BOS page. 
Either: we're outside headers, and this isn't a * header (normal data), outside headers and this is (error!), inside * headers, this is (append header), or inside headers and this isn't * (we've found the end of headers; flush the lot!) * * Before that, we flag that the last page seen (this one) was not a * BOS page; that way we know that when we next see a BOS page it's a * new chain, and we can flush all existing streams. */ page_type type; GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno); if (!stream) { GST_LOG_OBJECT (ogg, "Non-BOS page unexpectedly found at %" G_GINT64_FORMAT, ogg->offset); goto failure; } ogg->last_page_not_bos = TRUE; type = gst_ogg_parse_is_header (ogg, stream, &page); if (type == PAGE_PENDING && ogg->in_headers) { gst_buffer_ref (pagebuffer); stream->unknown_pages = g_list_append (stream->unknown_pages, pagebuffer); } else if (type == PAGE_HEADER) { if (!ogg->in_headers) { GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside " "headers at offset %" G_GINT64_FORMAT, ogg->offset); goto failure; } else { /* Append the header to the buffer list, after any unknown previous * pages */ stream->headers = g_list_concat (stream->headers, stream->unknown_pages); g_list_free (stream->unknown_pages); gst_buffer_ref (pagebuffer); stream->headers = g_list_append (stream->headers, pagebuffer); } } else { /* PAGE_DATA, or PAGE_PENDING but outside headers */ if (ogg->in_headers) { /* First non-header page... set caps, flush headers. * * First up, we build a single GValue list of all the pagebuffers * we're using for the headers, in order. * Then we set this on the caps structure. Then we can start pushing * buffers for the headers, and finally we send this non-header * page. */ GstCaps *caps; GstStructure *structure; GValue array = { 0 }; gint count = 0; gboolean found_pending_headers = FALSE; GSList *l; g_value_init (&array, GST_TYPE_ARRAY); for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; if (g_list_length (stream->headers) == 0) { GST_LOG_OBJECT (ogg, "No primary header found for stream %08x", stream->serialno); goto failure; } gst_ogg_parse_append_header (&array, GST_BUFFER (stream->headers->data)); count++; } for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *j; /* already appended the first header, now do headers 2-N */ for (j = stream->headers->next; j != NULL; j = j->next) { gst_ogg_parse_append_header (&array, GST_BUFFER (j->data)); count++; } } caps = gst_pad_query_caps (ogg->srcpad, NULL); caps = gst_caps_make_writable (caps); structure = gst_caps_get_structure (caps, 0); gst_structure_take_value (structure, "streamheader", &array); gst_pad_set_caps (ogg->srcpad, caps); if (ogg->caps) gst_caps_unref (ogg->caps); ogg->caps = caps; GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers " "(one per page)", count); /* Now, we do the same thing, but push buffers... 
*/ for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GstBuffer *buf = GST_BUFFER (stream->headers->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *j; /* pushed the first one for each stream already, now do 2-N */ for (j = stream->headers->next; j != NULL; j = j->next) { GstBuffer *buf = GST_BUFFER (j->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } } ogg->in_headers = 0; /* And finally the pending data pages */ for (l = ogg->oggstreams; l != NULL; l = l->next) { GstOggStream *stream = (GstOggStream *) l->data; GList *k; if (stream->unknown_pages == NULL) continue; if (found_pending_headers) { GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at " "approximate offset %" G_GINT64_FORMAT, ogg->offset); } found_pending_headers = TRUE; GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers", g_list_length (stream->unknown_pages) + 1); for (k = stream->unknown_pages; k != NULL; k = k->next) { GstBuffer *buf = GST_BUFFER (k->data); result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; } g_list_foreach (stream->unknown_pages, (GFunc) gst_mini_object_unref, NULL); g_list_free (stream->unknown_pages); stream->unknown_pages = NULL; } } if (granule == -1) { stream->stored_buffers = g_list_append (stream->stored_buffers, pagebuffer); } else { while (stream->stored_buffers) { GstBuffer *buf = stream->stored_buffers->data; buf = gst_buffer_make_writable (buf); GST_BUFFER_TIMESTAMP (buf) = buffertimestamp; if (!keyframe) { GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT); } else { keyframe = FALSE; } result = gst_pad_push (ogg->srcpad, buf); if (result != GST_FLOW_OK) return result; stream->stored_buffers = g_list_delete_link (stream->stored_buffers, stream->stored_buffers); } pagebuffer = gst_buffer_make_writable (pagebuffer); if (!keyframe) { GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT); } else { keyframe = FALSE; } result = gst_pad_push (ogg->srcpad, pagebuffer); if (result != GST_FLOW_OK) return result; } } } } } return result; failure: gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ()); return GST_FLOW_ERROR; }
/* for off == 4 (initial code): returns TRUE if the code starts a frame; * otherwise returns TRUE if the code terminates the preceding frame */ static gboolean gst_mpegv_parse_process_sc (GstMpegvParse * mpvparse, GstBuffer * buf, gint off, guint8 code) { gboolean ret = FALSE, packet = TRUE; g_return_val_if_fail (buf && gst_buffer_get_size (buf) >= 4, FALSE); GST_LOG_OBJECT (mpvparse, "process startcode %x (%s)", code, picture_start_code_name (code)); switch (code) { case GST_MPEG_VIDEO_PACKET_PICTURE: GST_LOG_OBJECT (mpvparse, "startcode is PICTURE"); /* picture is aggregated with preceding sequence/gop, if any. * so, picture start code only ends if already a previous one */ if (mpvparse->pic_offset < 0) mpvparse->pic_offset = off; else ret = (off != mpvparse->pic_offset); /* but it's a valid starting one */ if (off == 4) ret = TRUE; break; case GST_MPEG_VIDEO_PACKET_SEQUENCE: GST_LOG_OBJECT (mpvparse, "startcode is SEQUENCE"); if (mpvparse->seq_offset < 0) mpvparse->seq_offset = off; ret = TRUE; break; case GST_MPEG_VIDEO_PACKET_GOP: GST_LOG_OBJECT (mpvparse, "startcode is GOP"); if (mpvparse->seq_offset >= 0) ret = mpvparse->gop_split; else ret = TRUE; break; case GST_MPEG_VIDEO_PACKET_EXTENSION: GST_LOG_OBJECT (mpvparse, "startcode is VIDEO PACKET EXTENSION"); parse_picture_extension (mpvparse, buf, off); if (mpvparse->ext_count < G_N_ELEMENTS (mpvparse->ext_offsets)) mpvparse->ext_offsets[mpvparse->ext_count++] = off; /* fall-through */ default: packet = FALSE; break; } /* set size to avoid processing config again */ if (mpvparse->seq_offset >= 0 && off != mpvparse->seq_offset && !mpvparse->seq_size && packet) { /* should always be at start */ g_assert (mpvparse->seq_offset <= 4); gst_mpegv_parse_process_config (mpvparse, buf, off - mpvparse->seq_offset); mpvparse->seq_size = off - mpvparse->seq_offset; } /* extract some picture info if there is any in the frame being terminated */ if (ret && mpvparse->pic_offset >= 0 && mpvparse->pic_offset < off) { GstMapInfo map; gst_buffer_map (buf, &map, GST_MAP_READ); if (gst_mpeg_video_parse_picture_header (&mpvparse->pichdr, map.data, map.size, mpvparse->pic_offset)) GST_LOG_OBJECT (mpvparse, "picture_coding_type %d (%s), ending " "frame of size %d", mpvparse->pichdr.pic_type, picture_type_name (mpvparse->pichdr.pic_type), off - 4); else GST_LOG_OBJECT (mpvparse, "Couldn't parse picture at offset %d", mpvparse->pic_offset); gst_buffer_unmap (buf, &map); } return ret; }
static void gst_type_find_element_loop (GstPad * pad) { GstTypeFindElement *typefind; GstFlowReturn ret = GST_FLOW_OK; typefind = GST_TYPE_FIND_ELEMENT (GST_PAD_PARENT (pad)); if (typefind->need_stream_start) { gchar *stream_id; GstEvent *event; stream_id = gst_pad_create_stream_id (typefind->src, GST_ELEMENT_CAST (typefind), NULL); GST_DEBUG_OBJECT (typefind, "Pushing STREAM_START"); event = gst_event_new_stream_start (stream_id); gst_event_set_group_id (event, gst_util_group_id_next ()); gst_pad_push_event (typefind->src, event); typefind->need_stream_start = FALSE; g_free (stream_id); } if (typefind->mode == MODE_TYPEFIND) { GstPad *peer = NULL; GstCaps *found_caps = NULL; GstTypeFindProbability probability = GST_TYPE_FIND_NONE; GST_DEBUG_OBJECT (typefind, "find type in pull mode"); GST_OBJECT_LOCK (typefind); if (typefind->force_caps) { found_caps = gst_caps_ref (typefind->force_caps); probability = GST_TYPE_FIND_MAXIMUM; } GST_OBJECT_UNLOCK (typefind); if (!found_caps) { peer = gst_pad_get_peer (pad); if (peer) { gint64 size; gchar *ext; if (!gst_pad_query_duration (peer, GST_FORMAT_BYTES, &size)) { GST_WARNING_OBJECT (typefind, "Could not query upstream length!"); gst_object_unref (peer); ret = GST_FLOW_ERROR; goto pause; } /* if the size is 0, we cannot continue */ if (size == 0) { /* keep message in sync with message in sink event handler */ GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (_("Stream contains no data.")), ("Can't typefind empty stream")); gst_object_unref (peer); ret = GST_FLOW_ERROR; goto pause; } ext = gst_type_find_get_extension (typefind, pad); found_caps = gst_type_find_helper_get_range (GST_OBJECT_CAST (peer), GST_OBJECT_PARENT (peer), (GstTypeFindHelperGetRangeFunction) (GST_PAD_GETRANGEFUNC (peer)), (guint64) size, ext, &probability); g_free (ext); GST_DEBUG ("Found caps %" GST_PTR_FORMAT, found_caps); gst_object_unref (peer); } } if (!found_caps || probability < typefind->min_probability) { GST_DEBUG ("Trying to guess using extension"); gst_caps_replace (&found_caps, NULL); found_caps = gst_type_find_guess_by_extension (typefind, pad, &probability); } if (!found_caps || probability < typefind->min_probability) { GST_ELEMENT_ERROR (typefind, STREAM, TYPE_NOT_FOUND, (NULL), (NULL)); gst_caps_replace (&found_caps, NULL); ret = GST_FLOW_ERROR; goto pause; } GST_DEBUG ("Emitting found caps %" GST_PTR_FORMAT, found_caps); gst_type_find_element_emit_have_type (typefind, probability, found_caps); typefind->mode = MODE_NORMAL; gst_caps_unref (found_caps); } else if (typefind->mode == MODE_NORMAL) { GstBuffer *outbuf = NULL; if (typefind->need_segment) { typefind->need_segment = FALSE; gst_pad_push_event (typefind->src, gst_event_new_segment (&typefind->segment)); } /* Pull 4k blocks and send downstream */ ret = gst_pad_pull_range (typefind->sink, typefind->offset, 4096, &outbuf); if (ret != GST_FLOW_OK) goto pause; typefind->offset += gst_buffer_get_size (outbuf); ret = gst_pad_push (typefind->src, outbuf); if (ret != GST_FLOW_OK) goto pause; } else { /* Error out */ ret = GST_FLOW_ERROR; goto pause; } return; pause: { const gchar *reason = gst_flow_get_name (ret); gboolean push_eos = FALSE; GST_LOG_OBJECT (typefind, "pausing task, reason %s", reason); gst_pad_pause_task (typefind->sink); if (ret == GST_FLOW_EOS) { /* perform EOS logic */ if (typefind->segment.flags & GST_SEGMENT_FLAG_SEGMENT) { gint64 stop; /* for segment playback we need to post when (in stream time) * we stopped, this is either stop (when set) or the duration. 
*/ if ((stop = typefind->segment.stop) == -1) stop = typefind->offset; GST_LOG_OBJECT (typefind, "Sending segment done, at end of segment"); gst_element_post_message (GST_ELEMENT (typefind), gst_message_new_segment_done (GST_OBJECT (typefind), GST_FORMAT_BYTES, stop)); gst_pad_push_event (typefind->src, gst_event_new_segment_done (GST_FORMAT_BYTES, stop)); } else { push_eos = TRUE; } } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) { /* for fatal errors we post an error message */ GST_ELEMENT_ERROR (typefind, STREAM, FAILED, (NULL), ("stream stopped, reason %s", reason)); push_eos = TRUE; } if (push_eos) { /* send EOS, and prevent hanging if no streams yet */ GST_LOG_OBJECT (typefind, "Sending EOS, at end of stream"); gst_pad_push_event (typefind->src, gst_event_new_eos ()); } return; } }
static GstFlowReturn gst_dvd_spu_subpic_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstDVDSpu *dvdspu = (GstDVDSpu *) parent; GstFlowReturn ret = GST_FLOW_OK; gsize size; g_return_val_if_fail (dvdspu != NULL, GST_FLOW_ERROR); GST_INFO_OBJECT (dvdspu, "Have subpicture buffer with timestamp %" GST_TIME_FORMAT " and size %" G_GSIZE_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), gst_buffer_get_size (buf)); DVD_SPU_LOCK (dvdspu); if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { dvdspu->subp_seg.position = GST_BUFFER_TIMESTAMP (buf); } if (GST_BUFFER_IS_DISCONT (buf) && dvdspu->partial_spu) { gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } if (dvdspu->partial_spu != NULL) { if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) GST_WARNING_OBJECT (dvdspu, "Joining subpicture buffer with timestamp to previous"); dvdspu->partial_spu = gst_buffer_append (dvdspu->partial_spu, buf); } else { /* If we don't yet have a buffer, wait for one with a timestamp, * since that will avoid collecting the 2nd half of a partial buf */ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) dvdspu->partial_spu = buf; else gst_buffer_unref (buf); } if (dvdspu->partial_spu == NULL) goto done; size = gst_buffer_get_size (dvdspu->partial_spu); switch (dvdspu->spu_input_type) { case SPU_INPUT_TYPE_VOBSUB: if (size > 4) { guint8 header[2]; guint16 packet_size; gst_buffer_extract (dvdspu->partial_spu, 0, header, 2); packet_size = GST_READ_UINT16_BE (header); if (packet_size == size) { submit_new_spu_packet (dvdspu, dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else if (packet_size < size) { /* Somehow we collected too much - something is wrong. Drop the * packet entirely and wait for a new one */ GST_DEBUG_OBJECT (dvdspu, "Discarding invalid SPU buffer of size %" G_GSIZE_FORMAT, size); gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else { GST_LOG_OBJECT (dvdspu, "SPU buffer claims to be of size %u. Collected %" G_GSIZE_FORMAT " so far.", packet_size, size); } } break; case SPU_INPUT_TYPE_PGS:{ /* Collect until we have a command buffer that ends exactly at the size * we've collected */ guint8 packet_type; guint16 packet_size; GstMapInfo map; guint8 *ptr, *end; gboolean invalid = FALSE; gst_buffer_map (dvdspu->partial_spu, &map, GST_MAP_READ); ptr = map.data; end = ptr + map.size; /* FIXME: There's no need to walk the command set each time. We can set a * marker and resume where we left off next time */ /* FIXME: Move the packet parsing and sanity checking into the format-specific modules */ while (ptr != end) { if (ptr + 3 > end) break; packet_type = *ptr++; packet_size = GST_READ_UINT16_BE (ptr); ptr += 2; if (ptr + packet_size > end) break; ptr += packet_size; /* 0x80 is the END command for PGS packets */ if (packet_type == 0x80 && ptr != end) { /* Extra cruft on the end of the packet -> assume invalid */ invalid = TRUE; break; } } gst_buffer_unmap (dvdspu->partial_spu, &map); if (invalid) { gst_buffer_unref (dvdspu->partial_spu); dvdspu->partial_spu = NULL; } else if (ptr == end) { GST_DEBUG_OBJECT (dvdspu, "Have complete PGS packet of size %" G_GSIZE_FORMAT ". 
Enqueueing.", map.size); submit_new_spu_packet (dvdspu, dvdspu->partial_spu); dvdspu->partial_spu = NULL; } break; } default: GST_ERROR_OBJECT (dvdspu, "Input type not configured before SPU passing"); goto caps_not_set; } done: DVD_SPU_UNLOCK (dvdspu); return ret; /* ERRORS */ caps_not_set: { GST_ELEMENT_ERROR (dvdspu, RESOURCE, NO_SPACE_LEFT, (_("Subpicture format was not configured before data flow")), (NULL)); ret = GST_FLOW_ERROR; goto done; } }
static GstFlowReturn gst_interleave_collected (GstCollectPads * pads, GstInterleave * self) { guint size; GstBuffer *outbuf = NULL; GstFlowReturn ret = GST_FLOW_OK; GSList *collected; guint nsamples; guint ncollected = 0; gboolean empty = TRUE; gint width = self->width / 8; GstMapInfo write_info; GstClockTime timestamp = -1; /* FIXME: send caps and tags after stream-start */ #if 0 if (self->send_stream_start) { gchar s_id[32]; /* stream-start (FIXME: create id based on input ids) */ g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ()); gst_pad_push_event (self->src, gst_event_new_stream_start (s_id)); self->send_stream_start = FALSE; } #endif size = gst_collect_pads_available (pads); if (size == 0) goto eos; g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED); g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED); g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED); g_return_val_if_fail (self->rate > 0, GST_FLOW_NOT_NEGOTIATED); g_return_val_if_fail (size % width == 0, GST_FLOW_ERROR); GST_DEBUG_OBJECT (self, "Starting to collect %u bytes from %d channels", size, self->channels); nsamples = size / width; outbuf = gst_buffer_new_allocate (NULL, size * self->channels, NULL); if (outbuf == NULL || gst_buffer_get_size (outbuf) < size * self->channels) { if (outbuf) gst_buffer_unref (outbuf); return GST_FLOW_NOT_NEGOTIATED; } gst_buffer_map (outbuf, &write_info, GST_MAP_WRITE); memset (write_info.data, 0, size * self->channels); for (collected = pads->data; collected != NULL; collected = collected->next) { GstCollectData *cdata; GstBuffer *inbuf; guint8 *outdata; GstMapInfo input_info; cdata = (GstCollectData *) collected->data; inbuf = gst_collect_pads_take_buffer (pads, cdata, size); if (inbuf == NULL) { GST_DEBUG_OBJECT (cdata->pad, "No buffer available"); goto next; } ncollected++; gst_buffer_map (inbuf, &input_info, GST_MAP_READ); if (timestamp == -1) timestamp = GST_BUFFER_TIMESTAMP (inbuf); if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP)) { /* unmap before skipping so the buffer is not unreffed while still mapped */ gst_buffer_unmap (inbuf, &input_info); goto next; } empty = FALSE; outdata = write_info.data + width * GST_INTERLEAVE_PAD_CAST (cdata->pad)->channel; self->func (outdata, input_info.data, self->channels, nsamples); gst_buffer_unmap (inbuf, &input_info); next: if (inbuf) gst_buffer_unref (inbuf); } if (ncollected == 0) { gst_buffer_unmap (outbuf, &write_info); goto eos; } GST_OBJECT_LOCK (self); if (self->pending_segment) { GstEvent *event; GstSegment segment; event = self->pending_segment; self->pending_segment = NULL; GST_OBJECT_UNLOCK (self); /* convert the input segment to time now */ gst_event_copy_segment (event, &segment); if (segment.format != GST_FORMAT_TIME) { gst_event_unref (event); /* not time, convert */ switch (segment.format) { case GST_FORMAT_BYTES: segment.start *= width; if (segment.stop != -1) segment.stop *= width; if (segment.position != -1) segment.position *= width; /* fallthrough for the samples case */ case GST_FORMAT_DEFAULT: segment.start = gst_util_uint64_scale_int (segment.start, GST_SECOND, self->rate); if (segment.stop != -1) segment.stop = gst_util_uint64_scale_int (segment.stop, GST_SECOND, self->rate); if (segment.position != -1) segment.position = gst_util_uint64_scale_int (segment.position, GST_SECOND, self->rate); break; default: GST_WARNING ("can't convert segment values"); segment.start = 0; segment.stop = -1; segment.position = 0; break; } event = gst_event_new_segment (&segment); } gst_pad_push_event (self->src, event); GST_OBJECT_LOCK (self); } GST_OBJECT_UNLOCK (self); if (timestamp != -1) { 
self->offset = gst_util_uint64_scale_int (timestamp, self->rate, GST_SECOND); self->timestamp = timestamp; } GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp; GST_BUFFER_OFFSET (outbuf) = self->offset; self->offset += nsamples; self->timestamp = gst_util_uint64_scale_int (self->offset, GST_SECOND, self->rate); GST_BUFFER_DURATION (outbuf) = self->timestamp - GST_BUFFER_TIMESTAMP (outbuf); if (empty) GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP); gst_buffer_unmap (outbuf, &write_info); GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf))); ret = gst_pad_push (self->src, outbuf); return ret; eos: { GST_DEBUG_OBJECT (self, "no data available, must be EOS"); if (outbuf) gst_buffer_unref (outbuf); gst_pad_push_event (self->src, gst_event_new_eos ()); return GST_FLOW_EOS; } }
static GstFlowReturn mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad) { /* Create a stream. Fill in codec specific information */ GstFlowReturn ret = GST_FLOW_ERROR; GstCaps *caps; GstStructure *s; gboolean is_video = FALSE; caps = gst_pad_get_current_caps (pad); if (caps == NULL) { GST_DEBUG_OBJECT (pad, "Sink pad caps were not set before pushing"); return GST_FLOW_NOT_NEGOTIATED; } s = gst_caps_get_structure (caps, 0); g_return_val_if_fail (s != NULL, GST_FLOW_ERROR); if (gst_structure_has_name (s, "video/x-dirac")) { GST_DEBUG_OBJECT (pad, "Creating Dirac stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_DIRAC); is_video = TRUE; } else if (gst_structure_has_name (s, "audio/x-ac3")) { GST_DEBUG_OBJECT (pad, "Creating AC3 stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_AC3); } else if (gst_structure_has_name (s, "audio/x-dts")) { GST_DEBUG_OBJECT (pad, "Creating DTS stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_DTS); } else if (gst_structure_has_name (s, "audio/x-lpcm")) { GST_DEBUG_OBJECT (pad, "Creating LPCM stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_LPCM); } else if (gst_structure_has_name (s, "video/x-h264")) { const GValue *value; GST_DEBUG_OBJECT (pad, "Creating H264 stream"); /* Codec data contains SPS/PPS which need to go in stream for valid ES */ value = gst_structure_get_value (s, "codec_data"); if (value) { ps_data->codec_data = gst_buffer_ref (gst_value_get_buffer (value)); GST_DEBUG_OBJECT (pad, "%" G_GSIZE_FORMAT " bytes of codec data", gst_buffer_get_size (ps_data->codec_data)); ps_data->prepare_func = mpegpsmux_prepare_h264; } else { ps_data->codec_data = NULL; } ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_H264); is_video = TRUE; } else if (gst_structure_has_name (s, "audio/mpeg")) { gint mpegversion; if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) { GST_ELEMENT_ERROR (mux, STREAM, FORMAT, ("Invalid data format presented"), ("Caps with type audio/mpeg did not have mpegversion")); goto beach; } switch (mpegversion) { case 1: GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 1 stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_MPEG1); break; case 2: GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 2 stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_MPEG2); break; case 4: { const GValue *value; /* Codec data contains the AAC codec config which needs to go in stream for valid ES */ GST_DEBUG_OBJECT (pad, "Creating MPEG Audio, version 4 stream"); value = gst_structure_get_value (s, "codec_data"); if (value) { ps_data->codec_data = gst_buffer_ref (gst_value_get_buffer (value)); GST_DEBUG_OBJECT (pad, "%" G_GSIZE_FORMAT " bytes of codec data", gst_buffer_get_size (ps_data->codec_data)); ps_data->prepare_func = mpegpsmux_prepare_aac; } else { ps_data->codec_data = NULL; } ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_AUDIO_AAC); break; } default: GST_WARNING_OBJECT (pad, "unsupported mpegversion %d", mpegversion); goto beach; } } else if (gst_structure_has_name (s, "video/mpeg")) { gint mpegversion; if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) { GST_ELEMENT_ERROR (mux, STREAM, FORMAT, ("Invalid data format presented"), ("Caps with type video/mpeg did not have mpegversion")); goto beach; } if (mpegversion == 1) { GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 1 stream"); ps_data->stream = psmux_create_stream (mux->psmux, 
PSMUX_ST_VIDEO_MPEG1); } else if (mpegversion == 2) { GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 2 stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG2); } else { GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 4 stream"); ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG4); } is_video = TRUE; } if (ps_data->stream != NULL) { ps_data->stream_id = ps_data->stream->stream_id; ps_data->stream_id_ext = ps_data->stream->stream_id_ext; GST_DEBUG_OBJECT (pad, "Stream created, stream_id=%04x, stream_id_ext=%04x", ps_data->stream_id, ps_data->stream_id_ext); gst_structure_get_int (s, "rate", &ps_data->stream->audio_sampling); gst_structure_get_int (s, "channels", &ps_data->stream->audio_channels); gst_structure_get_int (s, "bitrate", &ps_data->stream->audio_bitrate); ret = GST_FLOW_OK; if (is_video && mux->video_stream_id == 0) { mux->video_stream_id = ps_data->stream_id; GST_INFO_OBJECT (mux, "video pad stream_id 0x%02x", mux->video_stream_id); } } beach: return ret; }
static GstFlowReturn gst_raw_base_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame, gint * skipsize) { gsize in_size, out_size; guint frame_size; guint num_out_frames; gsize units_n, units_d; guint64 buffer_duration; GstFlowReturn flow_ret = GST_FLOW_OK; GstEvent *new_caps_event = NULL; GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse); GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse); g_assert (klass->is_config_ready); g_assert (klass->get_caps_from_config); g_assert (klass->get_config_frame_size); g_assert (klass->get_units_per_second); /* We never skip any bytes this way. Instead, subclass takes care * of skipping any overhead (necessary, since the way it needs to * be skipped is completely subclass specific). */ *skipsize = 0; /* The operations below access the current config. Protect * against race conditions by using the object lock. */ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse); /* If the source pad caps haven't been set yet, or need to be * set again, do so now, BEFORE any buffers are pushed out */ if (G_UNLIKELY (!raw_base_parse->src_caps_set)) { GstCaps *new_src_caps; if (G_UNLIKELY (!klass->is_config_ready (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT))) { /* The current configuration is not ready. No caps can be * generated out of it. * The most likely reason for this is that the sink caps config * is the current one and no valid sink caps have been pushed * by upstream. Report the problem and exit. */ if (gst_raw_base_parse_is_using_sink_caps (raw_base_parse)) { goto config_not_ready; } else { /* This should not be reached if the property config is active */ g_assert_not_reached (); } } GST_DEBUG_OBJECT (parse, "setting src caps since this has not been done yet"); /* Convert the current config to a caps structure to * inform downstream about the new format */ if (!klass->get_caps_from_config (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT, &new_src_caps)) { GST_ERROR_OBJECT (raw_base_parse, "could not get src caps from current config"); flow_ret = GST_FLOW_NOT_NEGOTIATED; goto error_locked; } new_caps_event = gst_event_new_caps (new_src_caps); gst_caps_unref (new_src_caps); raw_base_parse->src_caps_set = TRUE; } frame_size = klass->get_config_frame_size (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT); in_size = gst_buffer_get_size (frame->buffer); /* gst_base_parse_set_min_frame_size() is called when the current * configuration changes and the change affects the frame size. This * means that a buffer must contain at least as many bytes as indicated * by the frame size. If there are fewer bytes inside, an error occurred; * either something in the parser went wrong, or the min frame size * wasn't updated properly. */ g_assert (in_size >= frame_size); /* Determine how many complete frames would fit in the input buffer. * Then check if this amount exceeds the maximum number of frames * as indicated by the subclass. */ num_out_frames = (in_size / frame_size); if (klass->get_max_frames_per_buffer) { guint max_num_out_frames = klass->get_max_frames_per_buffer (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT); num_out_frames = MIN (num_out_frames, max_num_out_frames); } /* Ensure that the size of the buffers that get pushed downstream * is always an integer multiple of the frame size to prevent cases * where downstream gets buffers with incomplete frames. */ out_size = num_out_frames * frame_size; /* Set the overhead size to ensure that timestamping excludes these * extra overhead bytes. 
*/ frame->overhead = klass->get_overhead_size ? klass->get_overhead_size (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT) : 0; g_assert (out_size >= (guint) (frame->overhead)); out_size -= frame->overhead; GST_LOG_OBJECT (raw_base_parse, "%" G_GSIZE_FORMAT " bytes input %" G_GSIZE_FORMAT " bytes output (%u frame(s)) %d bytes overhead", in_size, out_size, num_out_frames, frame->overhead); /* Calculate buffer duration */ klass->get_units_per_second (raw_base_parse, GST_FORMAT_BYTES, GST_RAW_BASE_PARSE_CONFIG_CURRENT, &units_n, &units_d); if (units_n == 0 || units_d == 0) buffer_duration = GST_CLOCK_TIME_NONE; else buffer_duration = gst_util_uint64_scale (out_size, GST_SECOND * units_d, units_n); if (klass->process) { GstBuffer *processed_data = NULL; if (!klass->process (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT, frame->buffer, in_size, out_size, &processed_data)) goto process_error; frame->out_buffer = processed_data; } else { frame->out_buffer = NULL; } /* Set the duration of the output buffer, or if none exists, of * the input buffer. Do this after the process() call, since in * case out_buffer is set, the subclass has created a new buffer. * Instead of requiring subclasses to set the duration (which * anyway must always be buffer_duration), let's do it here. */ if (frame->out_buffer != NULL) GST_BUFFER_DURATION (frame->out_buffer) = buffer_duration; else GST_BUFFER_DURATION (frame->buffer) = buffer_duration; /* Access to the current config is not needed in subsequent * operations, so the lock can be released */ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse); /* If any new caps have to be pushed downstream, do so * *before* the frame is finished */ if (G_UNLIKELY (new_caps_event != NULL)) { gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (raw_base_parse), new_caps_event); new_caps_event = NULL; } gst_base_parse_finish_frame (parse, frame, out_size + frame->overhead); return flow_ret; config_not_ready: GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse); GST_ELEMENT_ERROR (parse, STREAM, FORMAT, ("sink caps config is the current config, and it is not ready - " "upstream may not have pushed a caps event yet"), (NULL)); flow_ret = GST_FLOW_ERROR; goto error_end; process_error: GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse); GST_ELEMENT_ERROR (parse, STREAM, DECODE, ("could not process data"), (NULL)); flow_ret = GST_FLOW_ERROR; goto error_end; error_locked: GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse); goto error_end; error_end: frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP; if (new_caps_event != NULL) gst_event_unref (new_caps_event); return flow_ret; }
static GstFlowReturn gst_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset, guint length, GstBuffer ** buffer) { GstBuffer *buf = NULL; GstDTMFSrcEvent *event; GstDTMFSrc *dtmfsrc; GstClock *clock; GstClockID *clockid; GstClockReturn clockret; dtmfsrc = GST_DTMF_SRC (basesrc); do { if (dtmfsrc->last_event == NULL) { GST_DEBUG_OBJECT (dtmfsrc, "popping"); event = g_async_queue_pop (dtmfsrc->event_queue); GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type); switch (event->event_type) { case DTMF_EVENT_TYPE_STOP: GST_WARNING_OBJECT (dtmfsrc, "Received a DTMF stop event when already stopped"); gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event); break; case DTMF_EVENT_TYPE_START: gst_dtmf_prepare_timestamps (dtmfsrc); event->packet_count = 0; dtmfsrc->last_event = event; event = NULL; gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed", dtmfsrc->last_event); break; case DTMF_EVENT_TYPE_PAUSE_TASK: /* * We're pushing it back because it has to stay in there until * the task is really paused (and the queue will then be flushed) */ GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task..."); GST_OBJECT_LOCK (dtmfsrc); if (dtmfsrc->paused) { g_async_queue_push (dtmfsrc->event_queue, event); goto paused_locked; } GST_OBJECT_UNLOCK (dtmfsrc); break; } if (event) g_slice_free (GstDTMFSrcEvent, event); } else if (dtmfsrc->last_event->packet_count * dtmfsrc->interval >= MIN_DUTY_CYCLE) { event = g_async_queue_try_pop (dtmfsrc->event_queue); if (event != NULL) { switch (event->event_type) { case DTMF_EVENT_TYPE_START: GST_WARNING_OBJECT (dtmfsrc, "Received two consecutive DTMF start events"); gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event); break; case DTMF_EVENT_TYPE_STOP: g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event); dtmfsrc->last_event = NULL; gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed", event); break; case DTMF_EVENT_TYPE_PAUSE_TASK: /* * We're pushing it back because it has to stay in there until * the task is really paused (and the queue will then be flushed) */ GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task..."); GST_OBJECT_LOCK (dtmfsrc); if (dtmfsrc->paused) { g_async_queue_push (dtmfsrc->event_queue, event); goto paused_locked; } GST_OBJECT_UNLOCK (dtmfsrc); break; } g_slice_free (GstDTMFSrcEvent, event); } } } while (dtmfsrc->last_event == NULL); GST_LOG_OBJECT (dtmfsrc, "end event check, now wait for the proper time"); clock = gst_element_get_clock (GST_ELEMENT (basesrc)); clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp + gst_element_get_base_time (GST_ELEMENT (dtmfsrc))); gst_object_unref (clock); GST_OBJECT_LOCK (dtmfsrc); if (!dtmfsrc->paused) { dtmfsrc->clockid = clockid; GST_OBJECT_UNLOCK (dtmfsrc); clockret = gst_clock_id_wait (clockid, NULL); GST_OBJECT_LOCK (dtmfsrc); if (dtmfsrc->paused) clockret = GST_CLOCK_UNSCHEDULED; } else { clockret = GST_CLOCK_UNSCHEDULED; } gst_clock_id_unref (clockid); dtmfsrc->clockid = NULL; GST_OBJECT_UNLOCK (dtmfsrc); if (clockret == GST_CLOCK_UNSCHEDULED) { goto paused; } buf = gst_dtmf_src_create_next_tone_packet (dtmfsrc, dtmfsrc->last_event); GST_LOG_OBJECT (dtmfsrc, "Created buffer of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf)); *buffer = buf; return GST_FLOW_OK; paused_locked: GST_OBJECT_UNLOCK (dtmfsrc); paused: if (dtmfsrc->last_event) { GST_DEBUG_OBJECT (dtmfsrc, "Stopping current event"); /* Don't forget to release the stream lock */ g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event); dtmfsrc->last_event = NULL; } return GST_FLOW_FLUSHING; }
static GstFlowReturn gst_tcp_mix_src_pad_read (GstTCPMixSrcPad * pad, GstBuffer ** outbuf) { GstTCPMixSrc *src = GST_TCP_MIX_SRC (GST_PAD_PARENT (pad)); gssize avail, receivedBytes; GstMapInfo map; GError *err = NULL; /* if we have a client, wait for read */ GST_LOG_OBJECT (pad, "asked for a buffer"); if (!pad->client) { if (src->mode == MODE_LOOP) goto loop_read; else goto no_client; } /* read the buffer header */ read_available_bytes: avail = g_socket_get_available_bytes (pad->client); if (avail < 0) { goto socket_get_available_bytes_error; } else if (avail == 0) { GIOCondition condition; if (!g_socket_condition_wait (pad->client, G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP, pad->cancellable, &err)) goto socket_condition_wait_error; condition = g_socket_condition_check (pad->client, G_IO_IN | G_IO_PRI | G_IO_ERR | G_IO_HUP); if ((condition & G_IO_ERR)) goto socket_condition_error; else if ((condition & G_IO_HUP)) goto socket_condition_hup; avail = g_socket_get_available_bytes (pad->client); if (avail < 0) goto socket_get_available_bytes_error; } if (0 < avail) { gsize readBytes = MIN (avail, MAX_READ_SIZE); *outbuf = gst_buffer_new_and_alloc (readBytes); gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE); receivedBytes = g_socket_receive (pad->client, (gchar *) map.data, readBytes, pad->cancellable, &err); } else { /* Connection closed */ receivedBytes = 0; *outbuf = NULL; } if (receivedBytes == 0) goto socket_connection_closed; else if (receivedBytes < 0) goto socket_receive_error; gst_buffer_unmap (*outbuf, &map); gst_buffer_resize (*outbuf, 0, receivedBytes); #if 0 GST_LOG_OBJECT (pad, "Returning buffer from _get of size %" G_GSIZE_FORMAT ", ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", offset %" G_GINT64_FORMAT ", offset_end %" G_GINT64_FORMAT, gst_buffer_get_size (*outbuf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (*outbuf)), GST_TIME_ARGS (GST_BUFFER_DURATION (*outbuf)), GST_BUFFER_OFFSET (*outbuf), GST_BUFFER_OFFSET_END (*outbuf)); #endif g_clear_error (&err); return GST_FLOW_OK; /* Handling Errors */ no_client: { GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("No client socket (%s)", GST_PAD_NAME (pad))); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_ERROR; } socket_get_available_bytes_error: { GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("Failed to get available bytes from socket")); gst_tcp_mix_src_pad_reset (pad); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_ERROR; } socket_condition_wait_error: { GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("Select failed: %s", err->message)); g_clear_error (&err); gst_tcp_mix_src_pad_reset (pad); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_ERROR; } socket_condition_error: { GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("Socket in error state")); *outbuf = NULL; gst_tcp_mix_src_pad_reset (pad); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_ERROR; } socket_condition_hup: { GST_DEBUG_OBJECT (pad, "Connection closed"); *outbuf = NULL; gst_tcp_mix_src_pad_reset (pad); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_EOS; } socket_connection_closed: { GST_DEBUG_OBJECT (pad, "Connection closed"); if (*outbuf) { gst_buffer_unmap (*outbuf, &map); gst_buffer_unref (*outbuf); } *outbuf = NULL; gst_tcp_mix_src_pad_reset (pad); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_EOS; } socket_receive_error: { gst_buffer_unmap (*outbuf, &map); gst_buffer_unref (*outbuf); *outbuf = NULL; if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) { GST_DEBUG_OBJECT 
(pad, "Cancelled reading from socket"); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_FLUSHING; } else { GST_ELEMENT_ERROR (pad, RESOURCE, READ, (NULL), ("Failed to read from socket: %s", err->message)); if (src->mode == MODE_LOOP) goto loop_read; return GST_FLOW_ERROR; } } loop_read: { #if 0 GstEvent *event = gst_event_new_flush_start (); if (!gst_pad_push_event (pad, event)) { GST_ERROR_OBJECT (src, "Failed to flush data on %s.%s", GST_ELEMENT_NAME (src), GST_PAD_NAME (pad)); } #endif #if 0 GST_DEBUG_OBJECT (pad, "Looping"); #endif if (src->fill == FILL_NONE) { gst_tcp_mix_src_pad_wait_for_client (pad); goto read_available_bytes; } enum { buffer_size = 1024 }; *outbuf = gst_buffer_new_and_alloc (buffer_size); switch (src->fill) { case FILL_ZERO: break; case FILL_RAND: { guchar *p; gst_buffer_map (*outbuf, &map, GST_MAP_READWRITE); for (p = map.data; p < map.data + buffer_size; p += 4) { *((int *) p) = rand (); } } break; } return GST_FLOW_OK; } }
static GstFlowReturn gst_decklink_sink_videosink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) { GstDecklinkSink *decklinksink; IDeckLinkMutableVideoFrame *frame; void *data; GstFlowReturn ret; const GstDecklinkMode *mode; decklinksink = GST_DECKLINK_SINK (parent); #if 0 if (!decklinksink->video_enabled) { HRESULT ret; ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode, bmdVideoOutputFlagDefault); if (ret != S_OK) { GST_WARNING ("failed to enable video output"); //return FALSE; } decklinksink->video_enabled = TRUE; } #endif mode = gst_decklink_get_mode (decklinksink->mode); decklinksink->output->CreateVideoFrame (mode->width, mode->height, mode->width * 2, decklinksink->pixel_format, bmdFrameFlagDefault, &frame); frame->GetBytes (&data); gst_buffer_extract (buffer, 0, data, gst_buffer_get_size (buffer)); gst_buffer_unref (buffer); g_mutex_lock (&decklinksink->mutex); while (decklinksink->queued_frames > 2 && !decklinksink->stop) { g_cond_wait (&decklinksink->cond, &decklinksink->mutex); } if (!decklinksink->stop) { decklinksink->queued_frames++; } g_mutex_unlock (&decklinksink->mutex); if (!decklinksink->stop) { decklinksink->output->ScheduleVideoFrame (frame, decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n); decklinksink->num_frames++; if (!decklinksink->sched_started) { decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0); decklinksink->sched_started = TRUE; } ret = GST_FLOW_OK; } else { ret = GST_FLOW_FLUSHING; } frame->Release (); return ret; }
static GstFlowReturn gst_rtp_base_audio_payload_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buffer) { GstRTPBaseAudioPayload *payload; GstRTPBaseAudioPayloadPrivate *priv; guint payload_len; GstFlowReturn ret; guint available; guint min_payload_len; guint max_payload_len; guint align; guint size; gboolean discont; GstClockTime timestamp; ret = GST_FLOW_OK; payload = GST_RTP_BASE_AUDIO_PAYLOAD_CAST (basepayload); priv = payload->priv; timestamp = GST_BUFFER_PTS (buffer); discont = GST_BUFFER_IS_DISCONT (buffer); if (discont) { GST_DEBUG_OBJECT (payload, "Got DISCONT"); /* flush everything out of the adapter, mark DISCONT */ ret = gst_rtp_base_audio_payload_flush (payload, -1, -1); priv->discont = TRUE; /* get the distance between the timestamp gap and produce the same gap in * the RTP timestamps */ if (priv->last_timestamp != -1 && timestamp != -1) { /* we had a last timestamp, compare it to the new timestamp and update the * offset counter for RTP timestamps. The effect is that we will produce * output buffers containing the same RTP timestamp gap as the gap * between the GST timestamps. */ if (timestamp > priv->last_timestamp) { GstClockTime diff; guint64 bytes; /* we're only going to apply a positive gap, otherwise we let the marker * bit do its thing. simply convert to bytes and add the current * offset */ diff = timestamp - priv->last_timestamp; bytes = priv->time_to_bytes (payload, diff); priv->offset += bytes; GST_DEBUG_OBJECT (payload, "elapsed time %" GST_TIME_FORMAT ", bytes %" G_GUINT64_FORMAT ", new offset %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff), bytes, priv->offset); } } } if (!gst_rtp_base_audio_payload_get_lengths (basepayload, gst_rtp_base_payload_get_source_count (basepayload, buffer), &min_payload_len, &max_payload_len, &align)) goto config_error; GST_DEBUG_OBJECT (payload, "Calculated min_payload_len %u and max_payload_len %u", min_payload_len, max_payload_len); size = gst_buffer_get_size (buffer); /* shortcut, we don't need to use the adapter when the packet can be pushed * through directly. */ available = gst_adapter_available (priv->adapter); GST_DEBUG_OBJECT (payload, "got buffer size %u, available %u", size, available); if (available == 0 && (size >= min_payload_len && size <= max_payload_len) && (size % align == 0)) { /* If buffer fits on an RTP packet, let's just push it through * this will check against max_ptime and max_mtu */ GST_DEBUG_OBJECT (payload, "Fast packet push"); ret = gst_rtp_base_audio_payload_push_buffer (payload, buffer, timestamp); } else { /* push the buffer in the adapter */ gst_adapter_push (priv->adapter, buffer); available += size; GST_DEBUG_OBJECT (payload, "available now %u", available); /* as long as we have full frames */ /* TODO: Use buffer lists here */ while (available >= min_payload_len) { /* get multiple of alignment */ payload_len = MIN (max_payload_len, available); payload_len = ALIGN_DOWN (payload_len, align); /* and flush out the bytes from the adapter, automatically set the * timestamp. */ ret = gst_rtp_base_audio_payload_flush (payload, payload_len, -1); available -= payload_len; GST_DEBUG_OBJECT (payload, "available after push %u", available); } } return ret; /* ERRORS */ config_error: { GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL), ("subclass did not configure us properly")); gst_buffer_unref (buffer); return GST_FLOW_ERROR; } }
static GstFlowReturn gst_monoscope_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf) { GstFlowReturn flow_ret = GST_FLOW_OK; GstMonoscope *monoscope; monoscope = GST_MONOSCOPE (parent); if (monoscope->rate == 0) { gst_buffer_unref (inbuf); flow_ret = GST_FLOW_NOT_NEGOTIATED; goto out; } /* Make sure have an output format */ flow_ret = ensure_negotiated (monoscope); if (flow_ret != GST_FLOW_OK) { gst_buffer_unref (inbuf); goto out; } /* don't try to combine samples from discont buffer */ if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) { gst_adapter_clear (monoscope->adapter); monoscope->next_ts = GST_CLOCK_TIME_NONE; } /* Match timestamps from the incoming audio */ if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE) monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf); GST_LOG_OBJECT (monoscope, "in buffer has %d samples, ts=%" GST_TIME_FORMAT, gst_buffer_get_size (inbuf) / monoscope->bps, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf))); gst_adapter_push (monoscope->adapter, inbuf); inbuf = NULL; /* Collect samples until we have enough for an output frame */ while (flow_ret == GST_FLOW_OK) { gint16 *samples; GstBuffer *outbuf = NULL; guint32 *pixels, avail, bytesperframe; avail = gst_adapter_available (monoscope->adapter); GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail); bytesperframe = monoscope->spf * monoscope->bps; if (avail < bytesperframe) break; /* FIXME: something is wrong with QoS, we are skipping way too much * stuff even with very low CPU loads */ #if 0 if (monoscope->next_ts != -1) { gboolean need_skip; gint64 qostime; qostime = gst_segment_to_running_time (&monoscope->segment, GST_FORMAT_TIME, monoscope->next_ts); GST_OBJECT_LOCK (monoscope); /* check for QoS, don't compute buffers that are known to be late */ need_skip = GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) && qostime <= monoscope->earliest_time; GST_OBJECT_UNLOCK (monoscope); if (need_skip) { GST_WARNING_OBJECT (monoscope, "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time)); goto skip; } } #endif samples = (gint16 *) gst_adapter_map (monoscope->adapter, bytesperframe); if (monoscope->spf < 512) { gint16 in_data[512], i; for (i = 0; i < 512; ++i) { gdouble off; off = ((gdouble) i * (gdouble) monoscope->spf) / 512.0; in_data[i] = samples[MIN ((guint) off, monoscope->spf)]; } pixels = monoscope_update (monoscope->visstate, in_data); } else { /* not really correct, but looks much prettier */ pixels = monoscope_update (monoscope->visstate, samples); } GST_LOG_OBJECT (monoscope, "allocating output buffer"); flow_ret = gst_buffer_pool_acquire_buffer (monoscope->pool, &outbuf, NULL); if (flow_ret != GST_FLOW_OK) { gst_adapter_unmap (monoscope->adapter); goto out; } gst_buffer_fill (outbuf, 0, pixels, monoscope->outsize); GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts; GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration; flow_ret = gst_pad_push (monoscope->srcpad, outbuf); #if 0 skip: #endif if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts)) monoscope->next_ts += monoscope->frame_duration; gst_adapter_flush (monoscope->adapter, bytesperframe); } out: return flow_ret; }
static void gst_wildmidi_loop (GstPad * sinkpad) { GstWildmidi *wildmidi = GST_WILDMIDI (GST_PAD_PARENT (sinkpad)); GstFlowReturn ret; switch (wildmidi->state) { case GST_WILDMIDI_STATE_LOAD: { GstBuffer *buffer = NULL; GST_DEBUG_OBJECT (wildmidi, "loading song"); ret = gst_pad_pull_range (wildmidi->sinkpad, wildmidi->offset, -1, &buffer); if (ret == GST_FLOW_EOS) { GST_DEBUG_OBJECT (wildmidi, "Song loaded"); wildmidi->state = GST_WILDMIDI_STATE_PARSE; } else if (ret != GST_FLOW_OK) { GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL), ("Unable to read song")); goto pause; } else { GST_DEBUG_OBJECT (wildmidi, "pushing buffer"); gst_adapter_push (wildmidi->adapter, buffer); wildmidi->offset += gst_buffer_get_size (buffer); } break; } case GST_WILDMIDI_STATE_PARSE: ret = gst_wildmidi_parse_song (wildmidi); if (ret != GST_FLOW_OK) goto pause; wildmidi->state = GST_WILDMIDI_STATE_PLAY; break; case GST_WILDMIDI_STATE_PLAY: ret = gst_wildmidi_do_play (wildmidi); if (ret != GST_FLOW_OK) goto pause; break; default: break; } return; pause: { const gchar *reason = gst_flow_get_name (ret); GstEvent *event; GST_DEBUG_OBJECT (wildmidi, "pausing task, reason %s", reason); gst_pad_pause_task (sinkpad); if (ret == GST_FLOW_EOS) { /* perform EOS logic */ event = gst_event_new_eos (); gst_pad_push_event (wildmidi->srcpad, event); } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) { event = gst_event_new_eos (); /* for fatal errors we post an error message, post the error * first so the app knows about the error first. */ GST_ELEMENT_ERROR (wildmidi, STREAM, FAILED, ("Internal data flow error."), ("streaming task paused, reason %s (%d)", reason, ret)); gst_pad_push_event (wildmidi->srcpad, event); } } }
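/* The function above is a pad task function.  A minimal sketch (an assumption,
 * not the element's actual activation code) of how such a task is typically
 * started once the sink pad is activated in pull mode: */
static gboolean
gst_wildmidi_start_task_example (GstPad * sinkpad)
{
  /* gst_wildmidi_loop() receives the sink pad as its user data */
  return gst_pad_start_task (sinkpad,
      (GstTaskFunction) gst_wildmidi_loop, sinkpad, NULL);
}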
/* FIXME: this entire function looks completely wrong, if there is or * was a bug in GStreamer it should be fixed there (tpm) */ static GstPadProbeReturn brasero_transcode_buffer_handler (GstPad *pad, GstPadProbeInfo *info, gpointer user_data) { BraseroTranscodePrivate *priv; BraseroTranscode *self = user_data; GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info); GstPad *peer; gint64 size; priv = BRASERO_TRANSCODE_PRIVATE (self); size = gst_buffer_get_size (buffer); if (priv->segment_start <= 0 && priv->segment_end <= 0) return GST_PAD_PROBE_OK; /* what we do here is more or less what gstreamer does when seeking: * it reads and process from 0 to the seek position (I tried). * It even forwards the data before the seek position to the sink (which * is a problem in our case as it would be written) */ if (priv->size > priv->segment_end) { priv->size += size; return GST_PAD_PROBE_DROP; } if (priv->size + size > priv->segment_end) { GstBuffer *new_buffer; int data_size; /* the entire the buffer is not interesting for us */ /* create a new buffer and push it on the pad: * NOTE: we're going to receive it ... */ data_size = priv->segment_end - priv->size; new_buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_METADATA, 0, data_size); /* FIXME: we can now modify the probe buffer in 0.11 */ /* Recursive: the following calls ourselves BEFORE we finish */ peer = gst_pad_get_peer (pad); gst_pad_push (peer, new_buffer); priv->size += size - data_size; /* post an EOS event to stop pipeline */ gst_pad_push_event (peer, gst_event_new_eos ()); gst_object_unref (peer); return GST_PAD_PROBE_DROP; } /* see if the buffer is in the segment */ if (priv->size < priv->segment_start) { GstBuffer *new_buffer; gint data_size; /* see if all the buffer is interesting for us */ if (priv->size + size < priv->segment_start) { priv->size += size; return GST_PAD_PROBE_DROP; } /* create a new buffer and push it on the pad: * NOTE: we're going to receive it ... */ data_size = priv->size + size - priv->segment_start; new_buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_METADATA, size - data_size, data_size); /* FIXME: this looks dodgy (tpm) */ GST_BUFFER_TIMESTAMP (new_buffer) = GST_BUFFER_TIMESTAMP (buffer) + data_size; /* move forward by the size of bytes we dropped */ priv->size += size - data_size; /* FIXME: we can now modify the probe buffer in 0.11 */ /* this is recursive the following calls ourselves * BEFORE we finish */ peer = gst_pad_get_peer (pad); gst_pad_push (peer, new_buffer); gst_object_unref (peer); return GST_PAD_PROBE_DROP; } priv->size += size; priv->pos += size; return GST_PAD_PROBE_OK; }
static GstFlowReturn gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf) { GstFlowReturn ret = GST_FLOW_OK; GstIdentity *identity = GST_IDENTITY (trans); GstClockTime rundts = GST_CLOCK_TIME_NONE; GstClockTime runpts = GST_CLOCK_TIME_NONE; GstClockTime ts, duration, runtimestamp; gsize size; size = gst_buffer_get_size (buf); if (identity->check_imperfect_timestamp) gst_identity_check_imperfect_timestamp (identity, buf); if (identity->check_imperfect_offset) gst_identity_check_imperfect_offset (identity, buf); /* update prev values */ identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf); identity->prev_duration = GST_BUFFER_DURATION (buf); identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf); identity->prev_offset = GST_BUFFER_OFFSET (buf); if (identity->error_after >= 0) { identity->error_after--; if (identity->error_after == 0) goto error_after; } if (identity->drop_probability > 0.0) { if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability) goto dropped; } if (identity->dump) { GstMapInfo info; gst_buffer_map (buf, &info, GST_MAP_READ); gst_util_dump_mem (info.data, info.size); gst_buffer_unmap (buf, &info); } if (!identity->silent) { gst_identity_update_last_message_for_buffer (identity, "chain", buf, size); } if (identity->datarate > 0) { GstClockTime time = gst_util_uint64_scale_int (identity->offset, GST_SECOND, identity->datarate); GST_BUFFER_PTS (buf) = GST_BUFFER_DTS (buf) = time; GST_BUFFER_DURATION (buf) = size * GST_SECOND / identity->datarate; } if (identity->signal_handoffs) g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf); if (trans->segment.format == GST_FORMAT_TIME) { rundts = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME, GST_BUFFER_DTS (buf)); runpts = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME, GST_BUFFER_PTS (buf)); } if (GST_CLOCK_TIME_IS_VALID (rundts)) runtimestamp = rundts; else if (GST_CLOCK_TIME_IS_VALID (runpts)) runtimestamp = runpts; else runtimestamp = 0; ret = gst_identity_do_sync (identity, runtimestamp); identity->offset += size; if (identity->sleep_time && ret == GST_FLOW_OK) g_usleep (identity->sleep_time); if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME) && (ret == GST_FLOW_OK)) { GST_BUFFER_DTS (buf) = rundts; GST_BUFFER_PTS (buf) = runpts; GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE; GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE; } return ret; /* ERRORS */ error_after: { GST_ELEMENT_ERROR (identity, CORE, FAILED, (_("Failed after iterations as requested.")), (NULL)); return GST_FLOW_ERROR; } dropped: { if (!identity->silent) { gst_identity_update_last_message_for_buffer (identity, "dropping", buf, size); } ts = GST_BUFFER_TIMESTAMP (buf); if (GST_CLOCK_TIME_IS_VALID (ts)) { duration = GST_BUFFER_DURATION (buf); gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (identity), gst_event_new_gap (ts, duration)); } /* return DROPPED to basetransform. */ return GST_BASE_TRANSFORM_FLOW_DROPPED; } }
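/* Standalone illustration of the byte-offset to timestamp conversion that the
 * datarate property triggers above.  The helper name and the example rate are
 * assumptions for illustration only: with datarate = 176400 bytes/s
 * (44.1 kHz, 16-bit stereo) an offset of 176400 bytes maps to exactly
 * 1 second. */
static GstClockTime
example_offset_to_time (guint64 offset, gint datarate)
{
  return gst_util_uint64_scale_int (offset, GST_SECOND, datarate);
}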
static GstFlowReturn opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer) { GstFlowReturn res = GST_FLOW_OK; gsize size; guint8 *data; GstBuffer *outbuf, *bufd; gint16 *out_data; int n, err; int samples; unsigned int packet_size; GstBuffer *buf; GstMapInfo map, omap; GstAudioClippingMeta *cmeta = NULL; if (dec->state == NULL) { /* If we did not get any headers, default to 2 channels */ if (dec->n_channels == 0) { GST_INFO_OBJECT (dec, "No header, assuming single stream"); dec->n_channels = 2; dec->sample_rate = 48000; /* default stereo mapping */ dec->channel_mapping_family = 0; dec->channel_mapping[0] = 0; dec->channel_mapping[1] = 1; dec->n_streams = 1; dec->n_stereo_streams = 1; if (!gst_opus_dec_negotiate (dec, NULL)) return GST_FLOW_NOT_NEGOTIATED; } if (dec->n_channels == 2 && dec->n_streams == 1 && dec->n_stereo_streams == 0) { /* if we are automatically decoding 2 channels, but only have a single encoded one, direct both channels to it */ dec->channel_mapping[1] = 0; } GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz", dec->n_channels, dec->sample_rate); #ifndef GST_DISABLE_GST_DEBUG gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug, "Mapping table", dec->n_channels, dec->channel_mapping); #endif GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams, dec->n_stereo_streams); dec->state = opus_multistream_decoder_create (dec->sample_rate, dec->n_channels, dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err); if (!dec->state || err != OPUS_OK) goto creation_failed; } if (buffer) { GST_DEBUG_OBJECT (dec, "Received buffer of size %" G_GSIZE_FORMAT, gst_buffer_get_size (buffer)); } else { GST_DEBUG_OBJECT (dec, "Received missing buffer"); } /* if using in-band FEC, we introduce one extra frame's delay as we need to potentially wait for next buffer to decode a missing buffer */ if (dec->use_inband_fec && !dec->primed) { GST_DEBUG_OBJECT (dec, "First buffer received in FEC mode, early out"); gst_buffer_replace (&dec->last_buffer, buffer); dec->primed = TRUE; goto done; } /* That's the buffer we'll be sending to the opus decoder. */ buf = (dec->use_inband_fec && gst_buffer_get_size (dec->last_buffer) > 0) ? dec->last_buffer : buffer; /* That's the buffer we get duration from */ bufd = dec->use_inband_fec ?
dec->last_buffer : buffer; if (buf && gst_buffer_get_size (buf) > 0) { gst_buffer_map (buf, &map, GST_MAP_READ); data = map.data; size = map.size; GST_DEBUG_OBJECT (dec, "Using buffer of size %" G_GSIZE_FORMAT, size); } else { /* concealment data, pass NULL as the bits parameters */ GST_DEBUG_OBJECT (dec, "Using NULL buffer"); data = NULL; size = 0; } if (gst_buffer_get_size (bufd) == 0) { GstClockTime const opus_plc_alignment = 2500 * GST_USECOND; GstClockTime aligned_missing_duration; GstClockTime missing_duration = GST_BUFFER_DURATION (bufd); if (!GST_CLOCK_TIME_IS_VALID (missing_duration) || missing_duration == 0) { if (GST_CLOCK_TIME_IS_VALID (dec->last_known_buffer_duration)) { missing_duration = dec->last_known_buffer_duration; GST_WARNING_OBJECT (dec, "Missing duration, using last duration %" GST_TIME_FORMAT, GST_TIME_ARGS (missing_duration)); } else { GST_WARNING_OBJECT (dec, "Missing buffer, but unknown duration, and no previously known duration, assuming 20 ms"); missing_duration = 20 * GST_MSECOND; } } GST_DEBUG_OBJECT (dec, "missing buffer, doing PLC duration %" GST_TIME_FORMAT " plus leftover %" GST_TIME_FORMAT, GST_TIME_ARGS (missing_duration), GST_TIME_ARGS (dec->leftover_plc_duration)); /* add the leftover PLC duration to that of the buffer */ missing_duration += dec->leftover_plc_duration; /* align the combined buffer and leftover PLC duration to multiples * of 2.5ms, rounding to nearest, and store excess duration for later */ aligned_missing_duration = ((missing_duration + opus_plc_alignment / 2) / opus_plc_alignment) * opus_plc_alignment; dec->leftover_plc_duration = missing_duration - aligned_missing_duration; /* Opus' PLC cannot operate with less than 2.5ms; skip PLC * and accumulate the missing duration in the leftover_plc_duration * for the next PLC attempt */ if (aligned_missing_duration < opus_plc_alignment) { GST_DEBUG_OBJECT (dec, "current duration %" GST_TIME_FORMAT " of missing data not enough for PLC (minimum needed: %" GST_TIME_FORMAT ") - skipping", GST_TIME_ARGS (missing_duration), GST_TIME_ARGS (opus_plc_alignment)); goto done; } /* convert the duration (in nanoseconds) to sample count */ samples = gst_util_uint64_scale_int (aligned_missing_duration, dec->sample_rate, GST_SECOND); GST_DEBUG_OBJECT (dec, "calculated PLC frame length: %" GST_TIME_FORMAT " num frame samples: %d new leftover: %" GST_TIME_FORMAT, GST_TIME_ARGS (aligned_missing_duration), samples, GST_TIME_ARGS (dec->leftover_plc_duration)); } else { /* use maximum size (120 ms) as the number of returned samples is not constant over the stream. 
*/ samples = 120 * dec->sample_rate / 1000; } packet_size = samples * dec->n_channels * 2; outbuf = gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec), packet_size); if (!outbuf) { goto buffer_failed; } if (size > 0) dec->last_known_buffer_duration = packet_duration_opus (data, size); gst_buffer_map (outbuf, &omap, GST_MAP_WRITE); out_data = (gint16 *) omap.data; do { if (dec->use_inband_fec) { if (gst_buffer_get_size (dec->last_buffer) > 0) { /* normal delayed decode */ GST_LOG_OBJECT (dec, "FEC enabled, decoding last delayed buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0); } else { /* FEC reconstruction decode */ GST_LOG_OBJECT (dec, "FEC enabled, reconstructing last buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 1); } } else { /* normal decode */ GST_LOG_OBJECT (dec, "FEC disabled, decoding buffer"); n = opus_multistream_decode (dec->state, data, size, out_data, samples, 0); } if (n == OPUS_BUFFER_TOO_SMALL) { /* if too small, add 2.5 milliseconds and try again, up to the * Opus max size of 120 milliseconds */ if (samples >= 120 * dec->sample_rate / 1000) break; samples += 25 * dec->sample_rate / 10000; packet_size = samples * dec->n_channels * 2; gst_buffer_unmap (outbuf, &omap); gst_buffer_unref (outbuf); outbuf = gst_audio_decoder_allocate_output_buffer (GST_AUDIO_DECODER (dec), packet_size); if (!outbuf) { goto buffer_failed; } gst_buffer_map (outbuf, &omap, GST_MAP_WRITE); out_data = (gint16 *) omap.data; } } while (n == OPUS_BUFFER_TOO_SMALL); gst_buffer_unmap (outbuf, &omap); if (data != NULL) gst_buffer_unmap (buf, &map); if (n < 0) { GstFlowReturn ret = GST_FLOW_ERROR; gst_buffer_unref (outbuf); GST_AUDIO_DECODER_ERROR (dec, 1, STREAM, DECODE, (NULL), ("Decoding error (%d): %s", n, opus_strerror (n)), ret); return ret; } GST_DEBUG_OBJECT (dec, "decoded %d samples", n); gst_buffer_set_size (outbuf, n * 2 * dec->n_channels); GST_BUFFER_DURATION (outbuf) = samples * GST_SECOND / dec->sample_rate; samples = n; cmeta = gst_buffer_get_audio_clipping_meta (buf); g_assert (!cmeta || cmeta->format == GST_FORMAT_DEFAULT); /* Skip any samples that need skipping */ if (cmeta && cmeta->start) { guint pre_skip = cmeta->start; guint scaled_pre_skip = pre_skip * dec->sample_rate / 48000; guint skip = scaled_pre_skip > n ? n : scaled_pre_skip; guint scaled_skip = skip * 48000 / dec->sample_rate; gst_buffer_resize (outbuf, skip * 2 * dec->n_channels, -1); GST_INFO_OBJECT (dec, "Skipping %u samples at the beginning (%u at 48000 Hz)", skip, scaled_skip); } if (cmeta && cmeta->end) { guint post_skip = cmeta->end; guint scaled_post_skip = post_skip * dec->sample_rate / 48000; guint skip = scaled_post_skip > n ? n : scaled_post_skip; guint scaled_skip = skip * 48000 / dec->sample_rate; guint outsize = gst_buffer_get_size (outbuf); guint skip_bytes = skip * 2 * dec->n_channels; if (outsize > skip_bytes) outsize -= skip_bytes; else outsize = 0; gst_buffer_resize (outbuf, 0, outsize); GST_INFO_OBJECT (dec, "Skipping %u samples at the end (%u at 48000 Hz)", skip, scaled_skip); } if (gst_buffer_get_size (outbuf) == 0) { gst_buffer_unref (outbuf); outbuf = NULL; } else if (dec->opus_pos[0] != GST_AUDIO_CHANNEL_POSITION_INVALID) { gst_audio_buffer_reorder_channels (outbuf, GST_AUDIO_FORMAT_S16, dec->n_channels, dec->opus_pos, dec->info.position); } /* Apply gain */ /* Would be better off leaving this to a volume element, as this is a naive conversion that does too many int/float conversions. 
However, we don't have control over the pipeline... So make it optional if the user program wants to use a volume, but do it by default so the correct volume goes out by default */ if (dec->apply_gain && outbuf && dec->r128_gain) { gsize rsize; unsigned int i, nsamples; double volume = dec->r128_gain_volume; gint16 *samples; gst_buffer_map (outbuf, &omap, GST_MAP_READWRITE); samples = (gint16 *) omap.data; rsize = omap.size; GST_DEBUG_OBJECT (dec, "Applying gain: volume %f", volume); nsamples = rsize / 2; for (i = 0; i < nsamples; ++i) { int sample = (int) (samples[i] * volume + 0.5); samples[i] = sample < -32768 ? -32768 : sample > 32767 ? 32767 : sample; } gst_buffer_unmap (outbuf, &omap); } if (dec->use_inband_fec) { gst_buffer_replace (&dec->last_buffer, buffer); } res = gst_audio_decoder_finish_frame (GST_AUDIO_DECODER (dec), outbuf, 1); if (res != GST_FLOW_OK) GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res)); done: return res; creation_failed: GST_ELEMENT_ERROR (dec, LIBRARY, INIT, ("Failed to create Opus decoder"), ("Failed to create Opus decoder (%d): %s", err, opus_strerror (err))); return GST_FLOW_ERROR; buffer_failed: GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("Failed to create %u byte buffer", packet_size)); return GST_FLOW_ERROR; }
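/* Worked example of the PLC alignment arithmetic above (illustrative helper,
 * not part of opusdec): 21 ms of missing data rounds to the nearest 2.5 ms
 * multiple, i.e. 20 ms, leaving 1 ms as leftover for the next concealment
 * attempt; at a 48 kHz sample rate the aligned 20 ms correspond to
 * 960 samples of concealment. */
static int
example_plc_samples (GstClockTime aligned_duration, gint sample_rate)
{
  return (int) gst_util_uint64_scale_int (aligned_duration, sample_rate,
      GST_SECOND);
}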
static GstFlowReturn gst_cutter_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstFlowReturn ret = GST_FLOW_OK; GstCutter *filter; GstMapInfo map; gint16 *in_data; gint bpf, rate; gsize in_size; guint num_samples; gdouble NCS = 0.0; /* Normalized Cumulative Square of buffer */ gdouble RMS = 0.0; /* RMS of signal in buffer */ gdouble NMS = 0.0; /* Normalized Mean Square of buffer */ GstBuffer *prebuf; /* pointer to a prebuffer element */ GstClockTime duration; filter = GST_CUTTER (parent); if (GST_AUDIO_INFO_FORMAT (&filter->info) == GST_AUDIO_FORMAT_UNKNOWN) goto not_negotiated; bpf = GST_AUDIO_INFO_BPF (&filter->info); rate = GST_AUDIO_INFO_RATE (&filter->info); gst_buffer_map (buf, &map, GST_MAP_READ); in_data = (gint16 *) map.data; in_size = map.size; GST_LOG_OBJECT (filter, "length of prerec buffer: %" GST_TIME_FORMAT, GST_TIME_ARGS (filter->pre_run_length)); /* calculate mean square value on buffer */ switch (GST_AUDIO_INFO_FORMAT (&filter->info)) { case GST_AUDIO_FORMAT_S16: num_samples = in_size / 2; gst_cutter_calculate_gint16 (in_data, num_samples, &NCS); NMS = NCS / num_samples; break; case GST_AUDIO_FORMAT_S8: num_samples = in_size; gst_cutter_calculate_gint8 ((gint8 *) in_data, num_samples, &NCS); NMS = NCS / num_samples; break; default: /* this shouldn't happen */ g_warning ("no mean square function for format"); break; } gst_buffer_unmap (buf, &map); filter->silent_prev = filter->silent; duration = gst_util_uint64_scale (in_size / bpf, GST_SECOND, rate); RMS = sqrt (NMS); /* if RMS below threshold, add buffer length to silent run length count * if not, reset */ GST_LOG_OBJECT (filter, "buffer stats: NMS %f, RMS %f, audio length %f", NMS, RMS, gst_guint64_to_gdouble (duration)); if (RMS < filter->threshold_level) filter->silent_run_length += gst_guint64_to_gdouble (duration); else { filter->silent_run_length = 0 * GST_SECOND; filter->silent = FALSE; } if (filter->silent_run_length > filter->threshold_length) /* it has been silent long enough, flag it */ filter->silent = TRUE; /* has the silent status changed ? 
if so, send right signal * and, if from silent -> not silent, flush pre_record buffer */ if (filter->silent != filter->silent_prev) { if (filter->silent) { GstMessage *m = gst_cutter_message_new (filter, FALSE, GST_BUFFER_TIMESTAMP (buf)); GST_DEBUG_OBJECT (filter, "signaling CUT_STOP"); gst_element_post_message (GST_ELEMENT (filter), m); } else { gint count = 0; GstMessage *m = gst_cutter_message_new (filter, TRUE, GST_BUFFER_TIMESTAMP (buf)); GST_DEBUG_OBJECT (filter, "signaling CUT_START"); gst_element_post_message (GST_ELEMENT (filter), m); /* first of all, flush current buffer */ GST_DEBUG_OBJECT (filter, "flushing buffer of length %" GST_TIME_FORMAT, GST_TIME_ARGS (filter->pre_run_length)); while (filter->pre_buffer) { prebuf = (g_list_first (filter->pre_buffer))->data; filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf); gst_pad_push (filter->srcpad, prebuf); ++count; } GST_DEBUG_OBJECT (filter, "flushed %d buffers", count); filter->pre_run_length = 0 * GST_SECOND; } } /* now check if we have to send the new buffer to the internal buffer cache * or to the srcpad */ if (filter->silent) { filter->pre_buffer = g_list_append (filter->pre_buffer, buf); filter->pre_run_length += gst_guint64_to_gdouble (duration); while (filter->pre_run_length > filter->pre_length) { GstClockTime pduration; gsize psize; prebuf = (g_list_first (filter->pre_buffer))->data; g_assert (GST_IS_BUFFER (prebuf)); psize = gst_buffer_get_size (prebuf); pduration = gst_util_uint64_scale (psize / bpf, GST_SECOND, rate); filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf); filter->pre_run_length -= gst_guint64_to_gdouble (pduration); /* only pass buffers if we don't leak */ if (!filter->leaky) ret = gst_pad_push (filter->srcpad, prebuf); else gst_buffer_unref (prebuf); } } else ret = gst_pad_push (filter->srcpad, buf); return ret; /* ERRORS */ not_negotiated: { return GST_FLOW_NOT_NEGOTIATED; } }
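/* gst_cutter_calculate_gint16() is not shown in this excerpt.  A sketch of
 * what such a "normalized cumulative square" helper plausibly computes (an
 * assumption; the real implementation may differ in details such as the
 * normalization constant): the sum of (sample / 32768.0)^2 over the buffer,
 * so that NCS / num_samples is the normalized mean square and its sqrt() the
 * RMS compared against the threshold above. */
static void
example_calculate_gint16 (const gint16 * data, guint num_samples,
    gdouble * ncs)
{
  guint i;
  gdouble s;

  *ncs = 0.0;
  for (i = 0; i < num_samples; ++i) {
    s = data[i] / 32768.0;
    *ncs += s * s;
  }
}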
/* * Adds buffer sizes together. */ static void buffer_count_size (void *buffer, void *user_data) { guint *sum = (guint *) user_data; *sum += gst_buffer_get_size (buffer); }
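/* buffer_count_size() matches the GFunc signature, so a likely use
 * (illustrative example, not taken from the source) is summing the sizes of
 * all buffers held in a GList: */
static guint
example_list_total_size (GList * buffers)
{
  guint sum = 0;

  g_list_foreach (buffers, buffer_count_size, &sum);
  return sum;
}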
static GstFlowReturn gst_dtsdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) { GstFlowReturn ret = GST_FLOW_OK; GstDtsDec *dts = GST_DTSDEC (parent); gint first_access; if (dts->dvdmode) { guint8 data[2]; gsize size; gint offset, len; GstBuffer *subbuf; size = gst_buffer_get_size (buf); if (size < 2) goto not_enough_data; gst_buffer_extract (buf, 0, data, 2); first_access = (data[0] << 8) | data[1]; /* Skip the first_access header */ offset = 2; if (first_access > 1) { /* Length of data before first_access */ len = first_access - 1; if (len <= 0 || offset + len > size) goto bad_first_access_parameter; subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len); GST_BUFFER_TIMESTAMP (subbuf) = GST_CLOCK_TIME_NONE; ret = dts->base_chain (pad, parent, subbuf); if (ret != GST_FLOW_OK) { gst_buffer_unref (buf); goto done; } offset += len; len = size - offset; if (len > 0) { subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len); GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf); ret = dts->base_chain (pad, parent, subbuf); } gst_buffer_unref (buf); } else { /* first_access = 0 or 1, so if there's a timestamp it applies to the first byte */ subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, size - offset); GST_BUFFER_TIMESTAMP (subbuf) = GST_BUFFER_TIMESTAMP (buf); ret = dts->base_chain (pad, parent, subbuf); gst_buffer_unref (buf); } } else { ret = dts->base_chain (pad, parent, buf); } done: return ret; /* ERRORS */ not_enough_data: { GST_ELEMENT_ERROR (GST_ELEMENT (dts), STREAM, DECODE, (NULL), ("Insufficient data in buffer. Can't determine first_access")); gst_buffer_unref (buf); return GST_FLOW_ERROR; } bad_first_access_parameter: { GST_ELEMENT_ERROR (GST_ELEMENT (dts), STREAM, DECODE, (NULL), ("Bad first_access parameter (%d) in buffer", first_access)); gst_buffer_unref (buf); return GST_FLOW_ERROR; } }
static gboolean gst_mpegv_parse_process_config (GstMpegvParse * mpvparse, GstBuffer * buf, guint size) { guint8 *data; guint8 *data_with_prefix; GstMapInfo map; gint i, offset; if (mpvparse->seq_offset < 4) { /* This shouldn't happen, but just in case... */ GST_WARNING_OBJECT (mpvparse, "Sequence header start code missing."); return FALSE; } gst_buffer_map (buf, &map, GST_MAP_READ); data = map.data + mpvparse->seq_offset; g_assert (size <= map.size); /* pointer to sequence header data including the start code prefix - used for codec private data */ data_with_prefix = data - 4; /* only do stuff if something new; only compare first 11 bytes, changes in quantiser matrix doesn't matter here. Also changing the matrices in codec_data seems to cause problem with decoders */ if (mpvparse->config && size == gst_buffer_get_size (mpvparse->config) && gst_buffer_memcmp (mpvparse->config, 0, data_with_prefix, MIN (size, 11)) == 0) { gst_buffer_unmap (buf, &map); return TRUE; } if (!gst_mpeg_video_parse_sequence_header (&mpvparse->sequencehdr, data, size - mpvparse->seq_offset, 0)) { GST_DEBUG_OBJECT (mpvparse, "failed to parse config data (size %d) at offset %d", size, mpvparse->seq_offset); gst_buffer_unmap (buf, &map); return FALSE; } GST_LOG_OBJECT (mpvparse, "accepting parsed config size %d", size); /* Set mpeg version, and parse sequence extension */ mpvparse->config_flags = FLAG_NONE; for (i = 0; i < mpvparse->ext_count; ++i) { offset = mpvparse->ext_offsets[i]; mpvparse->config_flags |= FLAG_MPEG2; if (offset < size) { if (gst_mpeg_video_parse_sequence_extension (&mpvparse->sequenceext, map.data, size, offset)) { GST_LOG_OBJECT (mpvparse, "Read Sequence Extension"); mpvparse->config_flags |= FLAG_SEQUENCE_EXT; } else if (gst_mpeg_video_parse_sequence_display_extension (&mpvparse->sequencedispext, map.data, size, offset)) { GST_LOG_OBJECT (mpvparse, "Read Sequence Display Extension"); mpvparse->config_flags |= FLAG_SEQUENCE_DISPLAY_EXT; } } } if (mpvparse->config_flags & FLAG_MPEG2) { /* Update the sequence header based on extensions */ GstMpegVideoSequenceExt *seqext = NULL; GstMpegVideoSequenceDisplayExt *seqdispext = NULL; if (mpvparse->config_flags & FLAG_SEQUENCE_EXT) seqext = &mpvparse->sequenceext; if (mpvparse->config_flags & FLAG_SEQUENCE_DISPLAY_EXT) seqdispext = &mpvparse->sequencedispext; gst_mpeg_video_finalise_mpeg2_sequence_header (&mpvparse->sequencehdr, seqext, seqdispext); } if (mpvparse->fps_num == 0 || mpvparse->fps_den == 0) { mpvparse->fps_num = mpvparse->sequencehdr.fps_n; mpvparse->fps_den = mpvparse->sequencehdr.fps_d; } /* parsing ok, so accept it as new config */ if (mpvparse->config != NULL) gst_buffer_unref (mpvparse->config); mpvparse->config = gst_buffer_new_and_alloc (size); gst_buffer_fill (mpvparse->config, 0, data_with_prefix, size); /* trigger src caps update */ mpvparse->update_caps = TRUE; gst_buffer_unmap (buf, &map); return TRUE; }
/* Fill */ static GstFlowReturn gst_android_video_source_fill(GstPushSrc * p_pushsrc, GstBuffer * p_buf) { GstAndroidVideoSource *p_src; int vcd_ret; static struct timeval time_of_day; static struct timeval window_time; static struct timeval start_time; int timeDiffUsec; static gint frame_count = 0; static gint frame_count_window = 0; GstBuffer *p_outbuf; GstMapInfo mem_info; gboolean ok; GA_LOGTRACE("ENTER %s --xx--> thread(%ld)", __FUNCTION__, pthread_self()); p_src = GST_ANDROIDVIDEOSOURCE(p_pushsrc); if (gst_buffer_get_size(p_buf) != p_src->m_bufSize) { GA_LOGWARN("%s: WARNING: gst_buffer_get_size(p_buf)==%d != p_src->m_bufSize==%d", __FUNCTION__, gst_buffer_get_size(p_buf), p_src->m_bufSize); goto fill_error_negotiation; } VCD_checkChangeCamera(p_src->m_devHandle); if (!p_src->vcdStarted) { AV_CHECK_ERR(VCD_start(p_src->m_devHandle), fill_error_vcd_start); p_src->vcdStarted = TRUE; } if (!frame_count) { // Only first time gettimeofday(&start_time, NULL); gettimeofday(&window_time, NULL); } frame_count++; frame_count_window++; gettimeofday(&time_of_day, NULL); timeDiffUsec = time_diff_usec(&time_of_day, &window_time); if (timeDiffUsec > p_src->log_interval || !p_src->log_interval) { int framerate; int framerateWindow; int timeDiffSec; timeDiffSec = time_diff_sec(&time_of_day, &start_time); framerate = frame_count / (timeDiffSec > 0 ? timeDiffSec : 1); framerateWindow = frame_count_window * 1000000 / timeDiffUsec; GA_LOGVERB("%s ------> has now been called %d times --Create--> framerate since start: %d fps, framerate last %d usec: %d fps", __FUNCTION__, frame_count, framerate, timeDiffUsec, framerateWindow); gettimeofday(&window_time, NULL); frame_count_window = 0; } g_warn_if_fail(gst_buffer_is_writable(p_buf)); /* g_warn_if_fail() used for internal error (exception to our rules for this special "buffer copying case") */ g_assert(gst_buffer_is_writable(p_buf)); /* this buf should be allocated in the base class and should always be writable */ p_outbuf = gst_buffer_make_writable(p_buf); /* do this cause we never wanna crash in release even if somebody makes a mistake somewhere... */ ok = gst_buffer_map(p_outbuf, &mem_info, GST_MAP_WRITE); if (!ok) { goto fill_error_gst_buffer_map; } vcd_ret = VCD_read(p_src->m_devHandle, &(mem_info.data), mem_info.size); if (vcd_ret == VCD_ERR_NO_DATA) { // This should never happen. There should always be more data from the device. // In any case, if it happens we don't want to end or lock or anything, we just // want to go on as if there actually were data. Specifically, we do not want // to block the streaming thread... memset(mem_info.data, 0, mem_info.size); gst_buffer_unmap(p_outbuf, &mem_info); GST_BUFFER_OFFSET(p_outbuf) = GST_BUFFER_OFFSET_NONE; GST_BUFFER_OFFSET_END(p_outbuf) = GST_BUFFER_OFFSET_NONE; GST_BUFFER_PTS(p_outbuf) = GST_CLOCK_TIME_NONE; GST_BUFFER_DTS(p_outbuf) = GST_CLOCK_TIME_NONE; GST_BUFFER_DURATION(p_outbuf) = GST_CLOCK_TIME_NONE; GA_LOGWARN("%s: WARNING: Returning GST_FLOW_OK with a buffer with zeros...", __FUNCTION__); return GST_FLOW_OK; } if (vcd_ret != VCD_NO_ERROR) { gst_buffer_unmap(p_outbuf, &mem_info); goto fill_error_read; } gst_buffer_unmap(p_outbuf, &mem_info); set_gstbuf_time_and_offset(p_src, p_outbuf); GA_LOGTRACE("EXIT %s", __FUNCTION__); return GST_FLOW_OK; /* * propagate unhandled errors */ fill_error_read: { GA_LOGERROR("%s: Error when reading data from device!", __FUNCTION__); return GST_FLOW_ERROR; } fill_error_negotiation: { GA_LOGERROR("%s: ERROR: Strange buffer size. Negotiation not done? 
Disallowed renegotiation done?", __FUNCTION__); return GST_FLOW_ERROR; } fill_error_gst_buffer_map: { GA_LOGERROR("%s: gst_buffer_map() failed!", __FUNCTION__); return GST_FLOW_ERROR; } fill_error_vcd_start: { GA_LOGERROR("%s: FATAL ERROR: Could not start the video device!", __FUNCTION__); return GST_FLOW_ERROR; } }
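/* time_diff_usec()/time_diff_sec() are referenced above but not shown.
 * Plausible sketches consistent with their use (elapsed time between two
 * gettimeofday() samples; assumes <sys/time.h> is included) might look like
 * the following -- the project's actual helpers may differ: */
static int
example_time_diff_sec (const struct timeval *later,
    const struct timeval *earlier)
{
  return (int) (later->tv_sec - earlier->tv_sec);
}

static int
example_time_diff_usec (const struct timeval *later,
    const struct timeval *earlier)
{
  return (int) ((later->tv_sec - earlier->tv_sec) * 1000000 +
      (later->tv_usec - earlier->tv_usec));
}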
static GstFlowReturn gst_fake_sink_render (GstBaseSink * bsink, GstBuffer * buf) { GstFakeSink *sink = GST_FAKE_SINK_CAST (bsink); if (sink->num_buffers_left == 0) goto eos; if (sink->num_buffers_left != -1) sink->num_buffers_left--; if (!sink->silent) { gchar dts_str[64], pts_str[64], dur_str[64]; gchar *flag_str, *meta_str; GST_OBJECT_LOCK (sink); g_free (sink->last_message); if (GST_BUFFER_DTS (buf) != GST_CLOCK_TIME_NONE) { g_snprintf (dts_str, sizeof (dts_str), "%" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_DTS (buf))); } else { g_strlcpy (dts_str, "none", sizeof (dts_str)); } if (GST_BUFFER_PTS (buf) != GST_CLOCK_TIME_NONE) { g_snprintf (pts_str, sizeof (pts_str), "%" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (buf))); } else { g_strlcpy (pts_str, "none", sizeof (pts_str)); } if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) { g_snprintf (dur_str, sizeof (dur_str), "%" GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); } else { g_strlcpy (dur_str, "none", sizeof (dur_str)); } flag_str = gst_buffer_get_flags_string (buf); meta_str = gst_buffer_get_meta_string (buf); sink->last_message = g_strdup_printf ("chain ******* (%s:%s) (%u bytes, dts: %s, pts: %s" ", duration: %s, offset: %" G_GINT64_FORMAT ", offset_end: %" G_GINT64_FORMAT ", flags: %08x %s, meta: %s) %p", GST_DEBUG_PAD_NAME (GST_BASE_SINK_CAST (sink)->sinkpad), (guint) gst_buffer_get_size (buf), dts_str, pts_str, dur_str, GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf), GST_MINI_OBJECT_CAST (buf)->flags, flag_str, meta_str ? meta_str : "none", buf); g_free (flag_str); g_free (meta_str); GST_OBJECT_UNLOCK (sink); gst_fake_sink_notify_last_message (sink); } if (sink->signal_handoffs) g_signal_emit (sink, gst_fake_sink_signals[SIGNAL_HANDOFF], 0, buf, bsink->sinkpad); if (sink->dump) { GstMapInfo info; if (gst_buffer_map (buf, &info, GST_MAP_READ)) { gst_util_dump_mem (info.data, info.size); gst_buffer_unmap (buf, &info); } } if (sink->num_buffers_left == 0) goto eos; return GST_FLOW_OK; /* ERRORS */ eos: { GST_DEBUG_OBJECT (sink, "we are EOS"); return GST_FLOW_EOS; } }
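/* Application-side example (illustrative, not element code): inspecting
 * rendered buffers through the "handoff" signal emitted above.  Requires the
 * element's signal-handoffs property to be TRUE. */
static void
example_on_handoff (GstElement * fakesink, GstBuffer * buffer, GstPad * pad,
    gpointer user_data)
{
  GST_INFO ("handoff: buffer of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));
}

/* typical wiring:
 *   g_object_set (sink, "signal-handoffs", TRUE, NULL);
 *   g_signal_connect (sink, "handoff", G_CALLBACK (example_on_handoff), NULL);
 */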
static GstFlowReturn gst_wavenc_write_toc (GstWavEnc * wavenc) { GList *list; GstToc *toc; GstTocEntry *entry, *subentry; GstBuffer *buf; GstMapInfo map; guint8 *data; guint32 ncues, size, cues_size, labls_size, notes_size; if (!wavenc->toc) { GST_DEBUG_OBJECT (wavenc, "have no toc, checking toc_setter"); wavenc->toc = gst_toc_setter_get_toc (GST_TOC_SETTER (wavenc)); } if (!wavenc->toc) { GST_WARNING_OBJECT (wavenc, "have no toc"); return GST_FLOW_OK; } toc = gst_toc_ref (wavenc->toc); size = 0; cues_size = 0; labls_size = 0; notes_size = 0; /* check if the TOC entries is valid */ list = gst_toc_get_entries (toc); entry = list->data; if (gst_toc_entry_is_alternative (entry)) { list = gst_toc_entry_get_sub_entries (entry); while (list) { subentry = list->data; if (!gst_toc_entry_is_sequence (subentry)) return FALSE; list = g_list_next (list); } list = gst_toc_entry_get_sub_entries (entry); } if (gst_toc_entry_is_sequence (entry)) { while (list) { entry = list->data; if (!gst_toc_entry_is_sequence (entry)) return FALSE; list = g_list_next (list); } list = gst_toc_get_entries (toc); } ncues = g_list_length (list); GST_DEBUG_OBJECT (wavenc, "number of cue entries: %d", ncues); while (list) { guint32 id = 0; gint64 id64; const gchar *uid; entry = list->data; uid = gst_toc_entry_get_uid (entry); id64 = g_ascii_strtoll (uid, NULL, 0); /* check if id unique compatible with guint32 else generate random */ if (id64 >= 0 && gst_wavenc_is_cue_id_unique (id64, wavenc->cues)) { id = (guint32) id64; } else { do { id = g_random_int (); } while (!gst_wavenc_is_cue_id_unique (id, wavenc->cues)); } gst_wavenc_parse_cue (wavenc, id, entry); gst_wavenc_parse_labl (wavenc, id, entry); gst_wavenc_parse_note (wavenc, id, entry); list = g_list_next (list); } /* count cues size */ if (wavenc->cues) { cues_size = 24 * g_list_length (wavenc->cues); size += 12 + cues_size; } else { GST_WARNING_OBJECT (wavenc, "cue's not found"); return FALSE; } /* count labls size */ if (wavenc->labls) { list = wavenc->labls; while (list) { GstWavEncLabl *labl; labl = list->data; labls_size += 8 + GST_ROUND_UP_2 (labl->chunk_data_size); list = g_list_next (list); } size += labls_size; } /* count notes size */ if (wavenc->notes) { list = wavenc->notes; while (list) { GstWavEncNote *note; note = list->data; notes_size += 8 + GST_ROUND_UP_2 (note->chunk_data_size); list = g_list_next (list); } size += notes_size; } if (wavenc->labls || wavenc->notes) { size += 12; } buf = gst_buffer_new_and_alloc (size); gst_buffer_map (buf, &map, GST_MAP_WRITE); data = map.data; memset (data, 0, size); /* write Cue Chunk */ if (wavenc->cues) { memcpy (data, (gchar *) "cue ", 4); GST_WRITE_UINT32_LE (data + 4, 4 + cues_size); GST_WRITE_UINT32_LE (data + 8, ncues); data += 12; gst_wavenc_write_cues (&data, wavenc->cues); /* write Associated Data List Chunk */ if (wavenc->labls || wavenc->notes) { memcpy (data, (gchar *) "LIST", 4); GST_WRITE_UINT32_LE (data + 4, 4 + labls_size + notes_size); memcpy (data + 8, (gchar *) "adtl", 4); data += 12; if (wavenc->labls) gst_wavenc_write_labls (&data, wavenc->labls); if (wavenc->notes) gst_wavenc_write_notes (&data, wavenc->notes); } } /* free resources */ if (toc) gst_toc_unref (toc); if (wavenc->cues) g_list_free_full (wavenc->cues, g_free); if (wavenc->labls) g_list_free_full (wavenc->labls, g_free); if (wavenc->notes) g_list_free_full (wavenc->notes, g_free); gst_buffer_unmap (buf, &map); wavenc->meta_length += gst_buffer_get_size (buf); return gst_pad_push (wavenc->srcpad, buf); }
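/* For reference when reading the size bookkeeping above: the RIFF "cue "
 * chunk this function emits stores one 24-byte record per cue point, which
 * matches cues_size = 24 * g_list_length (wavenc->cues).  The field names
 * below follow common WAV documentation and are given for illustration only: */
struct example_wav_cue_point
{
  guint32 id;             /* unique cue point ID                         */
  guint32 position;       /* position in the play order                  */
  guint32 data_chunk_id;  /* FOURCC of the chunk containing the cue      */
  guint32 chunk_start;    /* byte offset of that chunk (0 for "data")    */
  guint32 block_start;    /* byte offset to the start of the block       */
  guint32 sample_offset;  /* sample offset of the cue point in the block */
};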
static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) { GstRtpVRawPay *rtpvrawpay; GstFlowReturn ret = GST_FLOW_OK; gfloat packets_per_packline; guint pgroups_per_packet; guint packlines_per_list, buffers_per_list; guint lines_delay; /* after how many packed lines we push out a buffer list */ guint last_line; /* last pack line number we pushed out a buffer list */ guint line, offset; guint8 *p0, *yp, *up, *vp; guint ystride, uvstride; guint xinc, yinc; guint pgroup; guint mtu; guint width, height; gint field, fields; GstVideoFormat format; GstVideoFrame frame; gint interlaced; gboolean use_buffer_lists; GstBufferList *list = NULL; GstRTPBuffer rtp = { NULL, }; rtpvrawpay = GST_RTP_VRAW_PAY (payload); if (!gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ)) { gst_buffer_unref (buffer); return GST_FLOW_ERROR; } GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes", gst_buffer_get_size (buffer)); /* get pointer and strides of the planes */ p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0); yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0); up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1); vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2); ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0); uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1); mtu = GST_RTP_BASE_PAYLOAD_MTU (payload); /* amount of bytes for one pixel */ pgroup = rtpvrawpay->pgroup; width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo); height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo); interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo); format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo); yinc = rtpvrawpay->yinc; xinc = rtpvrawpay->xinc; /* after how many packed lines we push out a buffer list */ lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame); /* calculate how many buffers we expect to store in a single buffer list */ pgroups_per_packet = (mtu - (12 + 14)) / pgroup; packets_per_packline = width / (xinc * pgroups_per_packet * 1.0); packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame); buffers_per_list = packlines_per_list * packets_per_packline; buffers_per_list = GST_ROUND_UP_8 (buffers_per_list); use_buffer_lists = (rtpvrawpay->chunks_per_frame < (height / yinc)); fields = 1 + interlaced; /* start with line 0, offset 0 */ for (field = 0; field < fields; field++) { line = field; offset = 0; last_line = 0; if (use_buffer_lists) list = gst_buffer_list_new_sized (buffers_per_list); /* write all lines */ while (line < height) { guint left, pack_line; GstBuffer *out; guint8 *outdata, *headers; gboolean next_line, complete = FALSE; guint length, cont, pixels; /* get the max allowed payload length size, we try to fill the complete MTU */ left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0); out = gst_rtp_buffer_new_allocate (left, 0, 0); if (field == 0) { GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer); } else { GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) + GST_BUFFER_DURATION (buffer) / 2; } gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp); outdata = gst_rtp_buffer_get_payload (&rtp); GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left, mtu); /* * 0 1 2 3 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | Extended Sequence Number | Length | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * |F| Line No |C| Offset | * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * | Length |F| Line No | * 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ * |C| Offset | . * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ . * . . * . Two (partial) lines of video data . * . . * +---------------------------------------------------------------+ */ /* need 2 bytes for the extended sequence number */ *outdata++ = 0; *outdata++ = 0; left -= 2; /* the headers start here */ headers = outdata; /* make sure we can fit at least *one* header and pixel */ if (!(left > (6 + pgroup))) { gst_rtp_buffer_unmap (&rtp); gst_buffer_unref (out); goto too_small; } /* while we can fit at least one header and one pixel */ while (left > (6 + pgroup)) { /* we need a 6 bytes header */ left -= 6; /* get how may bytes we need for the remaining pixels */ pixels = width - offset; length = (pixels * pgroup) / xinc; if (left >= length) { /* pixels and header fit completely, we will write them and skip to the * next line. */ next_line = TRUE; } else { /* line does not fit completely, see how many pixels fit */ pixels = (left / pgroup) * xinc; length = (pixels * pgroup) / xinc; next_line = FALSE; } GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length, pixels); left -= length; /* write length */ *outdata++ = (length >> 8) & 0xff; *outdata++ = length & 0xff; /* write line no */ *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80); *outdata++ = line & 0xff; if (next_line) { /* go to next line we do this here to make the check below easier */ line += yinc; } /* calculate continuation marker */ cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00; /* write offset and continuation marker */ *outdata++ = ((offset >> 8) & 0x7f) | cont; *outdata++ = offset & 0xff; if (next_line) { /* reset offset */ offset = 0; GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line); } else { offset += pixels; GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset); } if (!cont) break; } GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes", (guint) (outdata - headers)); /* second pass, read headers and write the data */ while (TRUE) { guint offs, lin; /* read length and cont */ length = (headers[0] << 8) | headers[1]; lin = ((headers[2] & 0x7f) << 8) | headers[3]; offs = ((headers[4] & 0x7f) << 8) | headers[5]; cont = headers[4] & 0x80; pixels = length / pgroup; headers += 6; GST_LOG_OBJECT (payload, "writing length %u, line %u, offset %u, cont %d", length, lin, offs, cont); switch (format) { case GST_VIDEO_FORMAT_RGB: case GST_VIDEO_FORMAT_RGBA: case GST_VIDEO_FORMAT_BGR: case GST_VIDEO_FORMAT_BGRA: case GST_VIDEO_FORMAT_UYVY: case GST_VIDEO_FORMAT_UYVP: offs /= xinc; memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length); outdata += length; break; case GST_VIDEO_FORMAT_AYUV: { gint i; guint8 *datap; datap = p0 + (lin * ystride) + (offs * 4); for (i = 0; i < pixels; i++) { *outdata++ = datap[2]; *outdata++ = datap[1]; *outdata++ = datap[3]; datap += 4; } break; } case GST_VIDEO_FORMAT_I420: { gint i; guint uvoff; guint8 *yd1p, *yd2p, *udp, *vdp; yd1p = yp + (lin * ystride) + (offs); yd2p = yd1p + ystride; uvoff = (lin / yinc * uvstride) + (offs / xinc); udp = up + uvoff; vdp = vp + uvoff; for (i = 0; i < pixels; i++) { *outdata++ = *yd1p++; *outdata++ = *yd1p++; *outdata++ = *yd2p++; *outdata++ = *yd2p++; *outdata++ = *udp++; *outdata++ = *vdp++; } break; } case GST_VIDEO_FORMAT_Y41B: { gint i; guint uvoff; guint8 *ydp, *udp, *vdp; ydp = yp + (lin * ystride) + offs; uvoff = (lin / yinc * uvstride) + (offs / xinc); udp = up + uvoff; vdp = vp + uvoff; for (i = 0; i < pixels; i++) { *outdata++ = *udp++; *outdata++ = *ydp++; 
*outdata++ = *ydp++; *outdata++ = *vdp++; *outdata++ = *ydp++; *outdata++ = *ydp++; } break; } default: gst_rtp_buffer_unmap (&rtp); gst_buffer_unref (out); goto unknown_sampling; } if (!cont) break; } if (line >= height) { GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker"); gst_rtp_buffer_set_marker (&rtp, TRUE); complete = TRUE; } gst_rtp_buffer_unmap (&rtp); if (left > 0) { GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left); gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left); } gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpvrawpay), out, buffer, g_quark_from_static_string (GST_META_TAG_VIDEO_STR)); /* Now either push out the buffer directly */ if (!use_buffer_lists) { ret = gst_rtp_base_payload_push (payload, out); continue; } /* or add the buffer to buffer list ... */ gst_buffer_list_add (list, out); /* .. and check if we need to push out the list */ pack_line = (line - field) / fields; if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) { GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack " "line %u", gst_buffer_list_length (list), pack_line); ret = gst_rtp_base_payload_push_list (payload, list); list = NULL; if (!complete) list = gst_buffer_list_new_sized (buffers_per_list); last_line = pack_line; } } } gst_video_frame_unmap (&frame); gst_buffer_unref (buffer); return ret; /* ERRORS */ unknown_sampling: { GST_ELEMENT_ERROR (payload, STREAM, FORMAT, (NULL), ("unimplemented sampling")); gst_video_frame_unmap (&frame); gst_buffer_unref (buffer); return GST_FLOW_NOT_SUPPORTED; } too_small: { GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT, (NULL), ("not enough space to send at least one pixel")); gst_video_frame_unmap (&frame); gst_buffer_unref (buffer); return GST_FLOW_NOT_SUPPORTED; } }
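/* Reading aid for the length/offset arithmetic above (values per RFC 4175,
 * which this payloader follows; the authoritative numbers are computed in the
 * element's setcaps handler, so treat this table as an assumption):
 *
 *   format        pgroup  xinc  yinc   bytes : pixels covered
 *   RGB              3      1     1    3 bytes per pixel
 *   RGBA             4      1     1    4 bytes per pixel
 *   UYVY (4:2:2)     4      2     1    4 bytes per 2 pixels
 *   I420 (4:2:0)     6      2     2    6 bytes per 2x2 block (two lines)
 */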
GstFlowReturn gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder, GstElement * element, GstPad * pad, GstBuffer * buf, GstPad * srcpad, GstPad * tagpad, GstCaps ** src_caps, const kate_event ** ev) { kate_packet kp; int ret; GstFlowReturn rflow = GST_FLOW_OK; gboolean is_header; guint8 *data; gsize size; guint8 header[1]; size = gst_buffer_extract (buf, 0, header, 1); GST_DEBUG_OBJECT (element, "got kate packet, %u bytes, type %02x", gst_buffer_get_size (buf), size == 0 ? -1 : header[0]); is_header = size > 0 && (header[0] & 0x80); if (!is_header && decoder->tags) { /* after we've processed headers, send any tags before processing the data packet */ GST_DEBUG_OBJECT (element, "Not a header, sending tags for pad %s:%s", GST_DEBUG_PAD_NAME (tagpad)); gst_element_found_tags_for_pad (element, tagpad, decoder->tags); decoder->tags = NULL; } data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ); kate_packet_wrap (&kp, size, data); ret = kate_high_decode_packetin (&decoder->k, &kp, ev); gst_buffer_unmap (buf, data, size); if (G_UNLIKELY (ret < 0)) { GST_ELEMENT_ERROR (element, STREAM, DECODE, (NULL), ("Failed to decode Kate packet: %s", gst_kate_util_get_error_message (ret))); return GST_FLOW_ERROR; } if (G_UNLIKELY (ret > 0)) { GST_DEBUG_OBJECT (element, "kate_high_decode_packetin has received EOS packet"); } /* headers may be interesting to retrieve information from */ if (G_UNLIKELY (is_header)) { switch (header[0]) { case 0x80: /* ID header */ GST_INFO_OBJECT (element, "Parsed ID header: language %s, category %s", decoder->k.ki->language, decoder->k.ki->category); if (src_caps) { if (*src_caps) { gst_caps_unref (*src_caps); *src_caps = NULL; } if (strcmp (decoder->k.ki->category, "K-SPU") == 0 || strcmp (decoder->k.ki->category, "spu-subtitles") == 0) { *src_caps = gst_caps_new_empty_simple ("video/x-dvd-subpicture"); } else if (decoder->k.ki->text_markup_type == kate_markup_none) { *src_caps = gst_caps_new_empty_simple ("text/plain"); } else { *src_caps = gst_caps_new_empty_simple ("text/x-pango-markup"); } GST_INFO_OBJECT (srcpad, "Setting caps: %" GST_PTR_FORMAT, *src_caps); if (!gst_pad_set_caps (srcpad, *src_caps)) { GST_ERROR_OBJECT (srcpad, "Failed to set caps %" GST_PTR_FORMAT, *src_caps); } } if (decoder->k.ki->language && *decoder->k.ki->language) { GstTagList *old = decoder->tags, *tags = gst_tag_list_new_empty (); if (tags) { gchar *lang_code; /* en_GB -> en */ lang_code = g_ascii_strdown (decoder->k.ki->language, -1); g_strdelimit (lang_code, NULL, '\0'); gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_LANGUAGE_CODE, lang_code, NULL); g_free (lang_code); /* TODO: category - where should it go ? 
*/ decoder->tags = gst_tag_list_merge (decoder->tags, tags, GST_TAG_MERGE_REPLACE); gst_tag_list_free (tags); if (old) gst_tag_list_free (old); } } /* update properties */ if (decoder->language) g_free (decoder->language); decoder->language = g_strdup (decoder->k.ki->language); if (decoder->category) g_free (decoder->category); decoder->category = g_strdup (decoder->k.ki->category); decoder->original_canvas_width = decoder->k.ki->original_canvas_width; decoder->original_canvas_height = decoder->k.ki->original_canvas_height; /* we can now send away any event we've delayed, as the src pad now has caps */ gst_kate_util_decoder_base_drain_event_queue (decoder); break; case 0x81: /* Vorbis comments header */ GST_INFO_OBJECT (element, "Parsed comments header"); { gchar *encoder = NULL; GstTagList *old = decoder->tags, *list = gst_tag_list_from_vorbiscomment_buffer (buf, (const guint8 *) "\201kate\0\0\0\0", 9, &encoder); if (list) { decoder->tags = gst_tag_list_merge (decoder->tags, list, GST_TAG_MERGE_REPLACE); gst_tag_list_free (list); } if (!decoder->tags) { GST_ERROR_OBJECT (element, "failed to decode comment header"); decoder->tags = gst_tag_list_new_empty (); } if (encoder) { gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, encoder, NULL); g_free (encoder); } gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_SUBTITLE_CODEC, "Kate", NULL); gst_tag_list_add (decoder->tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER_VERSION, decoder->k.ki->bitstream_version_major, NULL); if (old) gst_tag_list_free (old); if (decoder->initialized) { gst_element_found_tags_for_pad (element, tagpad, decoder->tags); decoder->tags = NULL; } else { /* Only push them as messages for the time being. * * They will be pushed on the pad once the decoder is initialized */ gst_element_post_message (element, gst_message_new_tag (GST_OBJECT (element), gst_tag_list_copy (decoder->tags))); } } break; default: break; } } #if ((KATE_VERSION_MAJOR<<16)|(KATE_VERSION_MINOR<<8)|KATE_VERSION_PATCH) >= 0x000400 else if (*ev && (*ev)->meta) { int count = kate_meta_query_count ((*ev)->meta); if (count > 0) { GstTagList *evtags = gst_tag_list_new_empty (); int idx; GST_DEBUG_OBJECT (decoder, "Kate event has %d attached metadata", count); for (idx = 0; idx < count; ++idx) { const char *tag, *value; size_t len; if (kate_meta_query ((*ev)->meta, idx, &tag, &value, &len) < 0) { GST_WARNING_OBJECT (decoder, "Failed to retrieve metadata %d", idx); } else { if (gst_kate_util_is_utf8_string (value, len)) { gchar *compound = g_strdup_printf ("%s=%s", tag, value); GST_DEBUG_OBJECT (decoder, "Metadata %d: %s=%s (%zu bytes)", idx, tag, value, len); gst_tag_list_add (evtags, GST_TAG_MERGE_APPEND, GST_TAG_EXTENDED_COMMENT, compound, NULL); g_free (compound); } else { GST_INFO_OBJECT (decoder, "Metadata %d, (%s, %zu bytes) is binary, ignored", idx, tag, len); } } } if (gst_tag_list_is_empty (evtags)) gst_tag_list_free (evtags); else gst_element_found_tags_for_pad (element, tagpad, evtags); } } #endif return rflow; }
static GstFlowReturn gst_bz2enc_chain (GstPad * pad, GstObject * parent, GstBuffer * in) { GstFlowReturn flow = GST_FLOW_OK; GstBuffer *out; GstBz2enc *b; guint n; int bz2_ret; GstMapInfo map, omap; b = GST_BZ2ENC (parent); if (!b->ready) goto not_ready; gst_buffer_map (in, &map, GST_MAP_READ); b->stream.next_in = (char *) map.data; b->stream.avail_in = map.size; while (b->stream.avail_in) { out = gst_buffer_new_and_alloc (b->buffer_size); gst_buffer_map (out, &omap, GST_MAP_WRITE); b->stream.next_out = (char *) omap.data; b->stream.avail_out = omap.size; bz2_ret = BZ2_bzCompress (&b->stream, BZ_RUN); gst_buffer_unmap (out, &omap); if (bz2_ret != BZ_RUN_OK) goto compress_error; n = gst_buffer_get_size (out); if (b->stream.avail_out >= n) { gst_buffer_unref (out); break; } gst_buffer_resize (out, 0, n - b->stream.avail_out); n = gst_buffer_get_size (out); GST_BUFFER_OFFSET (out) = b->stream.total_out_lo32 - n; flow = gst_pad_push (b->src, out); if (flow != GST_FLOW_OK) break; b->offset += n; } done: gst_buffer_unmap (in, &map); gst_buffer_unref (in); return flow; /* ERRORS */ not_ready: { GST_ELEMENT_ERROR (b, LIBRARY, FAILED, (NULL), ("Compressor not ready.")); flow = GST_FLOW_FLUSHING; goto done; } compress_error: { GST_ELEMENT_ERROR (b, STREAM, ENCODE, (NULL), ("Failed to compress data (error code %i)", bz2_ret)); gst_bz2enc_compress_init (b); gst_buffer_unref (out); flow = GST_FLOW_ERROR; goto done; } }
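/* gst_bz2enc_compress_init() is referenced above but not shown.  A sketch of
 * a plausible (re)initialization, assuming <bzlib.h> and <string.h> are
 * included; the block_size_100k parameter name is an assumption, not
 * necessarily the element's: */
static void
example_compress_init (bz_stream * stream, int block_size_100k)
{
  memset (stream, 0, sizeof (*stream));
  /* verbosity 0, workFactor 0 selects the library default */
  BZ2_bzCompressInit (stream, block_size_100k, 0, 0);
}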
/** * gst_audio_buffer_clip: * @buffer: (transfer full): The buffer to clip. * @segment: Segment in %GST_FORMAT_TIME or %GST_FORMAT_DEFAULT to which * the buffer should be clipped. * @rate: sample rate. * @bpf: size of one audio frame in bytes. This is the size of one sample * * channels. * * Clip the buffer to the given %GstSegment. * * After calling this function the caller does not own a reference to * @buffer anymore. * * Returns: (transfer full): %NULL if the buffer is completely outside the configured segment, * otherwise the clipped buffer is returned. * * If the buffer has no timestamp, it is assumed to be inside the segment and * is not clipped */ GstBuffer * gst_audio_buffer_clip (GstBuffer * buffer, GstSegment * segment, gint rate, gint bpf) { GstBuffer *ret; GstClockTime timestamp = GST_CLOCK_TIME_NONE, duration = GST_CLOCK_TIME_NONE; guint64 offset = GST_BUFFER_OFFSET_NONE, offset_end = GST_BUFFER_OFFSET_NONE; gsize trim, size, osize; gboolean change_duration = TRUE, change_offset = TRUE, change_offset_end = TRUE; g_return_val_if_fail (segment->format == GST_FORMAT_TIME || segment->format == GST_FORMAT_DEFAULT, buffer); g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) /* No timestamp - assume the buffer is completely in the segment */ return buffer; /* Get copies of the buffer metadata to change later. * Calculate the missing values for the calculations, * they won't be changed later though. */ trim = 0; osize = size = gst_buffer_get_size (buffer); /* no data, nothing to clip */ if (!size) return buffer; timestamp = GST_BUFFER_TIMESTAMP (buffer); GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp)); if (GST_BUFFER_DURATION_IS_VALID (buffer)) { duration = GST_BUFFER_DURATION (buffer); } else { change_duration = FALSE; duration = gst_util_uint64_scale (size / bpf, GST_SECOND, rate); } if (GST_BUFFER_OFFSET_IS_VALID (buffer)) { offset = GST_BUFFER_OFFSET (buffer); } else { change_offset = FALSE; offset = 0; } if (GST_BUFFER_OFFSET_END_IS_VALID (buffer)) { offset_end = GST_BUFFER_OFFSET_END (buffer); } else { change_offset_end = FALSE; offset_end = offset + size / bpf; } if (segment->format == GST_FORMAT_TIME) { /* Handle clipping for GST_FORMAT_TIME */ guint64 start, stop, cstart, cstop, diff; start = timestamp; stop = timestamp + duration; if (gst_segment_clip (segment, GST_FORMAT_TIME, start, stop, &cstart, &cstop)) { diff = cstart - start; if (diff > 0) { timestamp = cstart; if (change_duration) duration -= diff; diff = gst_util_uint64_scale (diff, rate, GST_SECOND); if (change_offset) offset += diff; trim += diff * bpf; size -= diff * bpf; } diff = stop - cstop; if (diff > 0) { /* duration is always valid if stop is valid */ duration -= diff; diff = gst_util_uint64_scale (diff, rate, GST_SECOND); if (change_offset_end) offset_end -= diff; size -= diff * bpf; } } else { gst_buffer_unref (buffer); return NULL; } } else { /* Handle clipping for GST_FORMAT_DEFAULT */ guint64 start, stop, cstart, cstop, diff; g_return_val_if_fail (GST_BUFFER_OFFSET_IS_VALID (buffer), buffer); start = offset; stop = offset_end; if (gst_segment_clip (segment, GST_FORMAT_DEFAULT, start, stop, &cstart, &cstop)) { diff = cstart - start; if (diff > 0) { offset = cstart; timestamp = gst_util_uint64_scale (cstart, GST_SECOND, rate); if (change_duration) duration -= gst_util_uint64_scale (diff, GST_SECOND, rate); trim += diff * bpf; size -= diff * bpf; } diff = stop - cstop; if (diff > 0) { offset_end = cstop; if (change_duration) duration -= 
gst_util_uint64_scale (diff, GST_SECOND, rate); size -= diff * bpf; } } else { gst_buffer_unref (buffer); return NULL; } } if (trim == 0 && size == osize) { ret = buffer; if (GST_BUFFER_TIMESTAMP (ret) != timestamp) { ret = gst_buffer_make_writable (ret); GST_BUFFER_TIMESTAMP (ret) = timestamp; } if (GST_BUFFER_DURATION (ret) != duration) { ret = gst_buffer_make_writable (ret); GST_BUFFER_DURATION (ret) = duration; } } else { /* Get a writable buffer and apply all changes */ GST_DEBUG ("trim %" G_GSIZE_FORMAT " size %" G_GSIZE_FORMAT, trim, size); ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, trim, size); gst_buffer_unref (buffer); GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp)); GST_BUFFER_TIMESTAMP (ret) = timestamp; if (change_duration) GST_BUFFER_DURATION (ret) = duration; if (change_offset) GST_BUFFER_OFFSET (ret) = offset; if (change_offset_end) GST_BUFFER_OFFSET_END (ret) = offset_end; } return ret; }
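/* Usage example (illustrative): clipping a 16-bit stereo buffer at 44.1 kHz
 * against a time segment; bpf = 2 bytes/sample * 2 channels = 4.  The call
 * takes ownership of the buffer and returns NULL when it lies completely
 * outside the segment. */
static GstBuffer *
example_clip_to_segment (GstBuffer * buf, GstClockTime start,
    GstClockTime stop)
{
  GstSegment segment;

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = start;
  segment.stop = stop;

  return gst_audio_buffer_clip (buf, &segment, 44100, 4);
}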