/*
 * gst_rfc2250_enc_add_slice:
 * @enc: the RFC 2250 packetizer instance.
 * @buffer: an MPEG slice; this function takes ownership (always unrefs it).
 *
 * Append a slice to the packet currently being assembled.  If the slice
 * does not fit in the space remaining it is placed in a new packet, and
 * slices larger than the MTU are fragmented across several packets.
 */
static void
gst_rfc2250_enc_add_slice (GstRFC2250Enc * enc, GstBuffer * buffer)
{
  gint slice_length = GST_BUFFER_SIZE (buffer);

  if (slice_length <= enc->remaining) {
    /* The slice fits in the current packet: merge it in and account
     * for the consumed space. */
    GstBuffer *newbuf;

    newbuf = gst_buffer_merge (enc->packet, buffer);
    gst_buffer_unref (buffer);
    gst_buffer_unref (enc->packet);
    enc->packet = newbuf;
    enc->remaining -= slice_length;
  } else if (slice_length <= enc->MTU) {
    /* Doesn't fit here, but fits in a fresh (empty) packet. */
    GstBuffer *newbuf;

    gst_rfc2250_enc_new_buffer (enc);
    newbuf = gst_buffer_merge (enc->packet, buffer);
    gst_buffer_unref (buffer);
    gst_buffer_unref (enc->packet);
    enc->packet = newbuf;
    enc->remaining -= slice_length;
  } else {
    /* Larger than the MTU: fragment the slice over several packets. */
    gint offset = 0;

    while (slice_length > 0) {
      GstBuffer *outbuf;
      GstBuffer *newbuf;

      outbuf = gst_buffer_create_sub (buffer, offset,
          MIN (enc->remaining, slice_length));
      newbuf = gst_buffer_merge (enc->packet, outbuf);
      slice_length -= GST_BUFFER_SIZE (outbuf);
      offset += GST_BUFFER_SIZE (outbuf);
      gst_buffer_unref (outbuf);
      /* FIX: release the OLD packet, not the freshly merged buffer.
       * The original code unreffed newbuf and then stored the dangling
       * pointer in enc->packet (use-after-free) while leaking the old
       * enc->packet.  This now matches the two branches above. */
      gst_buffer_unref (enc->packet);
      enc->packet = newbuf;
      gst_rfc2250_enc_new_buffer (enc);
    }
    gst_buffer_unref (buffer);
  }
}
/*
 * dxr3videosink_chain:
 * @pad: the sink pad the data arrived on.
 * @_data: incoming buffer or event (GStreamer 0.x delivers both here).
 *
 * Accumulate incoming video buffers into sink->cur_buf, remember the
 * timestamp of the most recent buffer, then hand the accumulated data
 * to the parser.  Events are dispatched to the event handler.
 */
static void
dxr3videosink_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  Dxr3VideoSink *sink;
  GstBuffer *merged;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  sink = DXR3VIDEOSINK (gst_pad_get_parent (pad));

  if (GST_IS_EVENT (buf)) {
    dxr3videosink_handle_event (pad, GST_EVENT (buf));
    return;
  }

  /* fprintf (stderr, "^^^^^^ Video block\n"); */

  /* FIX: capture the timestamp BEFORE the merge branch below can
   * gst_buffer_unref (buf); the original code read
   * GST_BUFFER_TIMESTAMP (buf) after the unref — a use-after-free. */
  sink->last_ts = GST_BUFFER_TIMESTAMP (buf);

  if (sink->cur_buf == NULL) {
    /* First buffer: just adopt it. */
    sink->cur_buf = buf;
  } else {
    /* gst_buffer_merge() returns a new buffer without consuming its
     * arguments, so drop both source references. */
    merged = gst_buffer_merge (sink->cur_buf, buf);
    gst_buffer_unref (sink->cur_buf);
    gst_buffer_unref (buf);
    sink->cur_buf = merged;
  }

  dxr3videosink_parse_data (sink);
}
/*
 * gst_goo_decspark_codec_data_processing:
 * @filter: the video filter instance.
 * @buffer: the first data buffer to be decoded.
 *
 * Prepend the stored SPARK header (codec data) to @buffer, preserving
 * @buffer's timestamp on the merged result.  The stored header is
 * released after being used once.
 *
 * Returns: the (possibly merged) buffer to feed to the decoder.
 */
static GstBuffer *
gst_goo_decspark_codec_data_processing (GstGooVideoFilter *filter, GstBuffer *buffer)
{
  GstGooDecSpark *self = GST_GOO_DECSPARK (filter);

  if (GST_IS_BUFFER (GST_GOO_VIDEODEC (self)->video_header)) {
    GST_DEBUG_OBJECT (self, "Adding SPARK header info to buffer");
    GstBuffer *new_buf = gst_buffer_merge (
        GST_BUFFER (GST_GOO_VIDEODEC (self)->video_header),
        GST_BUFFER (buffer));
    /* gst_buffer_merge() will end up putting video_header's timestamp on
     * the new buffer, but actually we want buf's timestamp: */
    GST_BUFFER_TIMESTAMP (new_buf) = GST_BUFFER_TIMESTAMP (buffer);
    /* NOTE(review): gst_buffer_merge() does not consume its arguments,
     * so the caller's reference on the incoming buffer is still held
     * when we replace it here — confirm who releases it, otherwise it
     * leaks. */
    buffer = new_buf;
    gst_buffer_unref (GST_GOO_VIDEODEC (self)->video_header);
    /* FIX: clear the pointer after dropping the reference.  The original
     * left it dangling, so the next call's GST_IS_BUFFER() check read
     * freed memory (and could prepend the header a second time). */
    GST_GOO_VIDEODEC (self)->video_header = NULL;
  }

  return buffer;
}
/*
 * gst_rfc2250_enc_loop:
 * @element: the RFC 2250 packetizer element.
 *
 * Pull the next MPEG chunk from the packetizer, start new output packets
 * at sequence / GOP / picture boundaries, route slice data through
 * gst_rfc2250_enc_add_slice(), and append any other chunk to the packet
 * being assembled.  Non-buffer data (events) flushes the pending packet
 * downstream and is handled by the default event handler.
 */
static void
gst_rfc2250_enc_loop (GstElement * element)
{
  GstRFC2250Enc *enc = GST_RFC2250_ENC (element);
  GstData *data;
  guint id;
  gboolean mpeg2;

  data = gst_mpeg_packetize_read (enc->packetize);

  id = GST_MPEG_PACKETIZE_ID (enc->packetize);
  mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (enc->packetize);

  if (GST_IS_BUFFER (data)) {
    GstBuffer *buffer = GST_BUFFER (data);

    GST_DEBUG ("rfc2250enc: have chunk 0x%02X", id);

    switch (id) {
      case SEQUENCE_START_CODE:
        gst_rfc2250_enc_new_buffer (enc);
        enc->flags |= ENC_HAVE_SEQ;
        break;
      case GOP_START_CODE:
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_GOP;
        break;
      case PICTURE_START_CODE:
        if (enc->flags & ENC_HAVE_DATA) {
          gst_rfc2250_enc_new_buffer (enc);
        }
        enc->flags |= ENC_HAVE_PIC;
        break;
      case EXT_START_CODE:
      case USER_START_CODE:
      case SEQUENCE_ERROR_START_CODE:
      case SEQUENCE_END_START_CODE:
        break;
      default:
        /* do this here because of the long range */
        if (id >= SLICE_MIN_START_CODE && id <= SLICE_MAX_START_CODE) {
          enc->flags |= ENC_HAVE_DATA;
          /* add_slice takes ownership of the buffer */
          gst_rfc2250_enc_add_slice (enc, buffer);
          buffer = NULL;
          break;
        }
        break;
    }
    if (buffer) {
      GstBuffer *merged;

      /* FIX: gst_buffer_merge() returns a NEW buffer and leaves its
       * arguments untouched; the original discarded the result, so the
       * chunk never actually landed in enc->packet and the merged
       * buffer leaked.  Keep the result and drop both old references
       * (same pattern as gst_rfc2250_enc_add_slice). */
      merged = gst_buffer_merge (enc->packet, buffer);
      enc->remaining -= GST_BUFFER_SIZE (buffer);
      gst_buffer_unref (enc->packet);
      gst_buffer_unref (buffer);
      enc->packet = merged;
    }
  } else {
    /* Non-buffer data: push out any pending packet, then let the
     * default handler deal with the event. */
    if (enc->packet) {
      gst_pad_push (enc->srcpad, GST_DATA (enc->packet));
      enc->packet = NULL;
      enc->flags = 0;
      enc->remaining = enc->MTU;
    }
    gst_pad_event_default (enc->sinkpad, GST_EVENT (data));
  }
}
/*
 * decode_buffer:
 * @decoder: the VC-1 decoder instance.
 * @buffer: the input buffer to decode.
 *
 * Feed one input buffer to the VC-1 decoder.  Three paths:
 *   1. A NULL-data buffer signals end of stream (decode_sequence_end).
 *   2. With codec-data present, the demuxer is assumed to deliver plain
 *      frames: the whole buffer is decoded as one GST_VC1_FRAME BDU.
 *   3. Otherwise the buffer is scanned for bitstream data units (BDUs)
 *      with gst_vc1_identify_next_bdu(), each decoded in turn; a trailing
 *      incomplete BDU is stashed in priv->sub_buffer for the next call.
 *
 * The adapter is kept in sync by flushing exactly the bytes consumed
 * for each BDU (offset = start-code scan distance, size = payload).
 *
 * Returns: a GstVaapiDecoderStatus code.
 */
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderVC1 *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVaapiDecoderStatus status;
    GstVC1ParserResult result;
    GstVC1BDU ebdu;
    GstBuffer *codec_data;
    guchar *buf;
    guint buf_size, ofs;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    /* NOTE(review): uses &&, so an empty buffer with non-NULL data does
     * NOT end the sequence — presumably intentional; confirm. */
    if (!buf && buf_size == 0)
        return decode_sequence_end(decoder);

    /* Keep a reference alive inside the adapter for the BDU scan below. */
    gst_buffer_ref(buffer);
    gst_adapter_push(priv->adapter, buffer);

    /* Assume demuxer sends out plain frames if codec-data */
    codec_data = GST_VAAPI_DECODER_CODEC_DATA(decoder);
    if (codec_data && codec_data != buffer) {
        /* Whole buffer is one frame BDU; no start-code scanning needed. */
        ebdu.type      = GST_VC1_FRAME;
        ebdu.size      = buf_size;
        ebdu.sc_offset = 0;
        ebdu.offset    = 0;
        ebdu.data      = buf;
        status = decode_ebdu(decoder, &ebdu);

        if (gst_adapter_available(priv->adapter) >= buf_size)
            gst_adapter_flush(priv->adapter, buf_size);
        return status;
    }

    /* Prepend the leftover partial BDU from the previous call, if any. */
    if (priv->sub_buffer) {
        buffer = gst_buffer_merge(priv->sub_buffer, buffer);
        if (!buffer)
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        gst_buffer_unref(priv->sub_buffer);
        priv->sub_buffer = NULL;
    }

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    ofs      = 0;
    do {
        /* Locate the next BDU (start code + payload) in the remainder. */
        result = gst_vc1_identify_next_bdu(
            buf + ofs,
            buf_size - ofs,
            &ebdu
        );
        status = get_status(result);

        if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA) {
            /* Incomplete BDU at the tail: save it for the next buffer. */
            priv->sub_buffer = gst_buffer_create_sub(buffer, ofs, buf_size - ofs);
            break;
        }

        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            break;

        ofs += ebdu.offset + ebdu.size;
        /* Flush the scan distance before decoding, the payload after —
         * decode_ebdu presumably relies on this ordering; keep it. */
        if (gst_adapter_available(priv->adapter) >= ebdu.offset)
            gst_adapter_flush(priv->adapter, ebdu.offset);

        status = decode_ebdu(decoder, &ebdu);

        if (gst_adapter_available(priv->adapter) >= ebdu.size)
            gst_adapter_flush(priv->adapter, ebdu.size);
    } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS);
    return status;
}
/*
 * gst_jasper_dec_chain:
 * @pad: the decoder's sink pad.
 * @buf: the incoming JPEG-2000 buffer (ownership taken; always unreffed).
 *
 * Chain function: applies QoS-based frame dropping, strips an optional
 * prefix, prepends stored codec_data, decodes the frame and pushes the
 * resulting picture downstream with the input timestamps.
 *
 * Returns: GST_FLOW_OK on success, GST_FLOW_NOT_NEGOTIATED when the
 * output format was never negotiated, or the flow result of the push.
 */
static GstFlowReturn
gst_jasper_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstJasperDec *dec;
  GstFlowReturn flow = GST_FLOW_OK;
  GstClockTime timestamp;
  GstBuffer *picture = NULL;
  guint8 *frame_data;
  guint frame_size;
  gboolean keep_frame;

  dec = GST_JASPER_DEC (GST_PAD_PARENT (pad));

  /* Refuse data until caps negotiation fixed the output format. */
  if (dec->fmt < 0)
    goto not_negotiated;

  timestamp = GST_BUFFER_TIMESTAMP (buf);

  GST_LOG_OBJECT (dec, "buffer with ts: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
    dec->discont = TRUE;

  /* Ask the QoS machinery whether this frame is still worth decoding. */
  keep_frame = gst_jasper_dec_do_qos (dec, timestamp);

  /* FIXME: do clipping */

  if (G_UNLIKELY (!keep_frame)) {
    dec->discont = TRUE;
    goto done;
  }

  /* strip possible prefix */
  if (dec->strip) {
    GstBuffer *stripped;

    stripped = gst_buffer_create_sub (buf, dec->strip,
        GST_BUFFER_SIZE (buf) - dec->strip);
    gst_buffer_copy_metadata (stripped, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = stripped;
  }

  /* preprend possible codec_data */
  if (dec->codec_data) {
    GstBuffer *with_header;

    with_header = gst_buffer_merge (dec->codec_data, buf);
    gst_buffer_copy_metadata (with_header, buf, GST_BUFFER_COPY_TIMESTAMPS);
    gst_buffer_unref (buf);
    buf = with_header;
  }

  /* now really feed the data to decoder */
  frame_data = GST_BUFFER_DATA (buf);
  frame_size = GST_BUFFER_SIZE (buf);
  flow = gst_jasper_dec_get_picture (dec, frame_data, frame_size, &picture);

  if (picture) {
    gst_buffer_copy_metadata (picture, buf, GST_BUFFER_COPY_TIMESTAMPS);
    if (dec->discont) {
      GST_BUFFER_FLAG_SET (picture, GST_BUFFER_FLAG_DISCONT);
      dec->discont = FALSE;
    }

    if (flow == GST_FLOW_OK)
      flow = gst_pad_push (dec->srcpad, picture);
    else
      gst_buffer_unref (picture);
  }

done:
  gst_buffer_unref (buf);

  return flow;

  /* ERRORS */
not_negotiated:
  {
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("format wasn't negotiated before chain function"));
    flow = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}