/**
 * gst_adapter_take_buffer:
 * @adapter: a #GstAdapter
 * @nbytes: the number of bytes to take
 *
 * Returns a #GstBuffer containing the first @nbytes bytes of the
 * @adapter. The returned bytes will be flushed from the adapter.
 * This function is potentially more performant than
 * gst_adapter_take() since it can reuse the memory in pushed buffers
 * by subbuffering or merging. This function will always return a
 * buffer with a single memory region.
 *
 * Note that no assumptions should be made as to whether certain buffer
 * flags such as the DISCONT flag are set on the returned buffer, or not.
 * The caller needs to explicitly set or unset flags that should be set or
 * unset.
 *
 * Since 1.6 this will also copy over all GstMeta of the input buffers except
 * for meta with the %GST_META_FLAG_POOLED flag or with the "memory" tag.
 *
 * Caller owns a reference to the returned buffer. gst_buffer_unref() after
 * usage.
 *
 * Free-function: gst_buffer_unref
 *
 * Returns: (transfer full) (nullable): a #GstBuffer containing the first
 *     @nbytes of the adapter, or %NULL if @nbytes bytes are not available.
 *     gst_buffer_unref() when no longer needed.
 */
GstBuffer *
gst_adapter_take_buffer (GstAdapter * adapter, gsize nbytes)
{
  GstBuffer *buffer;

  g_return_val_if_fail (GST_IS_ADAPTER (adapter), NULL);
  g_return_val_if_fail (nbytes > 0, NULL);

  buffer = gst_adapter_get_buffer (adapter, nbytes);
  if (buffer)
    gst_adapter_flush_unchecked (adapter, nbytes);

  return buffer;
}
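/* Illustrative usage sketch for gst_adapter_take_buffer() (not part of the
 * original source): drain fixed-size packets from an adapter. The function
 * name and the packet_size parameter are assumptions made for the example. */
static void
example_drain_packets (GstAdapter * adapter, gsize packet_size)
{
  while (gst_adapter_available (adapter) >= packet_size) {
    /* (transfer full): the caller owns the returned buffer */
    GstBuffer *packet = gst_adapter_take_buffer (adapter, packet_size);

    if (packet == NULL)
      break;

    g_print ("took %" G_GSIZE_FORMAT " bytes\n", gst_buffer_get_size (packet));
    gst_buffer_unref (packet);
  }
}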
/**
 * @brief Chain function, this function does the actual processing.
 */
static GstFlowReturn
gst_tensor_aggregator_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstTensorAggregator *self;
  GstFlowReturn ret = GST_FLOW_OK;
  GstAdapter *adapter;
  gsize avail, buf_size, frame_size, out_size;
  guint frames_in, frames_out, frames_flush;
  GstClockTime duration;

  self = GST_TENSOR_AGGREGATOR (parent);
  g_assert (self->tensor_configured);

  buf_size = gst_buffer_get_size (buf);
  g_return_val_if_fail (buf_size > 0, GST_FLOW_ERROR);

  frames_in = self->frames_in;
  frames_out = self->frames_out;
  frames_flush = self->frames_flush;
  frame_size = buf_size / frames_in;

  if (frames_in == frames_out) {
    /** push the incoming buffer (do concat if needed) */
    return gst_tensor_aggregator_push (self, buf, frame_size);
  }

  adapter = self->adapter;
  g_assert (adapter != NULL);

  duration = GST_BUFFER_DURATION (buf);
  if (GST_CLOCK_TIME_IS_VALID (duration)) {
    /** supposed same duration for incoming buffer */
    duration = gst_util_uint64_scale_int (duration, frames_out, frames_in);
  }

  gst_adapter_push (adapter, buf);

  out_size = frame_size * frames_out;
  g_assert (out_size > 0);

  while ((avail = gst_adapter_available (adapter)) >= out_size &&
      ret == GST_FLOW_OK) {
    GstBuffer *outbuf;
    GstClockTime pts, dts;
    guint64 pts_dist, dts_dist;
    gsize flush;

    pts = gst_adapter_prev_pts (adapter, &pts_dist);
    dts = gst_adapter_prev_dts (adapter, &dts_dist);

    /**
     * Update timestamp.
     * If frames-in is larger than frames-out, the same timestamp (pts and
     * dts) would be returned.
     */
    if (frames_in > 1) {
      gint fn, fd;

      fn = self->in_config.rate_n;
      fd = self->in_config.rate_d;

      if (fn > 0 && fd > 0) {
        if (GST_CLOCK_TIME_IS_VALID (pts)) {
          pts +=
              gst_util_uint64_scale_int (pts_dist * fd, GST_SECOND,
              fn * frame_size);
        }

        if (GST_CLOCK_TIME_IS_VALID (dts)) {
          dts +=
              gst_util_uint64_scale_int (dts_dist * fd, GST_SECOND,
              fn * frame_size);
        }
      }
    }

    outbuf = gst_adapter_get_buffer (adapter, out_size);
    outbuf = gst_buffer_make_writable (outbuf);

    /** set timestamp */
    GST_BUFFER_PTS (outbuf) = pts;
    GST_BUFFER_DTS (outbuf) = dts;
    GST_BUFFER_DURATION (outbuf) = duration;

    ret = gst_tensor_aggregator_push (self, outbuf, frame_size);

    /** flush data */
    if (frames_flush > 0) {
      flush = frame_size * frames_flush;

      if (flush > avail) {
        /**
         * @todo flush data
         * Invalid state, tried to flush large size.
         * We have to determine how to handle this case.
         * (flush the out-size or all available bytes)
         * Now all available bytes in adapter will be flushed.
         */
        flush = avail;
      }
    } else {
      flush = out_size;
    }

    gst_adapter_flush (adapter, flush);
  }

  return ret;
}
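/* Illustrative sketch (not part of the original source) of the timestamp
 * interpolation performed in gst_tensor_aggregator_chain() above, assuming a
 * frame rate of rate_n/rate_d and a fixed frame_size in bytes. The function
 * name is hypothetical. */
static GstClockTime
example_interpolate_ts (GstClockTime prev_ts, guint64 dist_bytes,
    gint rate_n, gint rate_d, gsize frame_size)
{
  if (!GST_CLOCK_TIME_IS_VALID (prev_ts) || rate_n <= 0 || rate_d <= 0)
    return prev_ts;

  /* dist_bytes / frame_size frames have elapsed since prev_ts, and each
   * frame lasts rate_d / rate_n seconds, so the elapsed time is
   * dist_bytes * rate_d * GST_SECOND / (rate_n * frame_size) nanoseconds. */
  return prev_ts + gst_util_uint64_scale_int (dist_bytes * rate_d,
      GST_SECOND, rate_n * (gint) frame_size);
}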
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" G_GINT64_FORMAT, GST_BUFFER_TIMESTAMP (buf));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    if (base_video_decoder->started) {
      gst_base_video_decoder_reset (base_video_decoder);
    }
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("timestamp %" G_GINT64_FORMAT " offset %" G_GINT64_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), base_video_decoder->offset);
    base_video_decoder->last_sink_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  if (GST_BUFFER_OFFSET_END (buf) != -1) {
    GST_DEBUG ("gp %" G_GINT64_FORMAT, GST_BUFFER_OFFSET_END (buf));
    base_video_decoder->last_sink_offset_end = GST_BUFFER_OFFSET_END (buf);
  }
  base_video_decoder->offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %lld", GST_BUFFER_TIMESTAMP (buf));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  gst_adapter_push (base_video_decoder->input_adapter, buf);

  if (!base_video_decoder->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_decoder->input_adapter);
    m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);
    if (m < 0) {
      g_warning ("subclass returned negative scan %d", m);
    }
    if (m >= n) {
      g_warning ("subclass scanned past end %d >= %d", m, n);
    }

    gst_adapter_flush (base_video_decoder->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);
      /* this is only "maybe" sync */
      base_video_decoder->have_sync = TRUE;
    }
    if (!base_video_decoder->have_sync) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_decoder->input_adapter);
  base_video_decoder->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  gst_buffer_unref (buffer);

  do {
    ret = klass->parse_data (base_video_decoder, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
    gst_object_unref (base_video_decoder);
    return GST_FLOW_OK;
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
static GstFlowReturn
gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstByteReader reader;
  GstBuffer *input;
  GstMapInfo map_info;
  GstCaps *caps;
  guint available;
  GstFlowReturn res = GST_FLOW_OK;
  GstFlxDec *flxdec;
  FlxHeader *flxh;

  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
  flxdec = (GstFlxDec *) parent;
  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);

  gst_adapter_push (flxdec->adapter, buf);
  available = gst_adapter_available (flxdec->adapter);
  input = gst_adapter_get_buffer (flxdec->adapter, available);
  if (!gst_buffer_map (input, &map_info, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
        ("%s", "Failed to map buffer"), (NULL));
    goto error;
  }
  gst_byte_reader_init (&reader, map_info.data, map_info.size);

  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
    if (available >= FlxHeaderSize) {
      GstByteReader header;
      GstCaps *templ;

      if (!gst_byte_reader_get_sub_reader (&reader, &header, FlxHeaderSize)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Could not read header"), (NULL));
        goto unmap_input_error;
      }
      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);
      available -= FlxHeaderSize;

      if (!_read_flx_header (flxdec, &header, &flxdec->hdr)) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Failed to parse header"), (NULL));
        goto unmap_input_error;
      }

      flxh = &flxdec->hdr;

      /* check header */
      if (flxh->type != FLX_MAGICHDR_FLI &&
          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
            ("not a flx file (type %x)", flxh->type));
        goto unmap_input_error;
      }

      GST_INFO_OBJECT (flxdec, "size : %d", flxh->size);
      GST_INFO_OBJECT (flxdec, "frames : %d", flxh->frames);
      GST_INFO_OBJECT (flxdec, "width : %d", flxh->width);
      GST_INFO_OBJECT (flxdec, "height : %d", flxh->height);
      GST_INFO_OBJECT (flxdec, "depth : %d", flxh->depth);
      GST_INFO_OBJECT (flxdec, "speed : %d", flxh->speed);

      flxdec->next_time = 0;

      if (flxh->type == FLX_MAGICHDR_FLI) {
        flxdec->frame_time = JIFFIE * flxh->speed;
      } else if (flxh->speed == 0) {
        flxdec->frame_time = GST_SECOND / 70;
      } else {
        flxdec->frame_time = flxh->speed * GST_MSECOND;
      }

      flxdec->duration = flxh->frames * flxdec->frame_time;
      GST_LOG ("duration : %" GST_TIME_FORMAT,
          GST_TIME_ARGS (flxdec->duration));

      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
      caps = gst_caps_copy (templ);
      gst_caps_unref (templ);
      gst_caps_set_simple (caps,
          "width", G_TYPE_INT, flxh->width,
          "height", G_TYPE_INT, flxh->height,
          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
          (gint) flxdec->frame_time / 1000, NULL);

      gst_pad_set_caps (flxdec->srcpad, caps);
      gst_caps_unref (caps);

      if (flxdec->need_segment) {
        gst_pad_push_event (flxdec->srcpad,
            gst_event_new_segment (&flxdec->segment));
        flxdec->need_segment = FALSE;
      }

      /* zero means 8 */
      if (flxh->depth == 0)
        flxh->depth = 8;

      if (flxh->depth != 8) {
        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE,
            ("%s", "Don't know how to decode non 8 bit depth streams"),
            (NULL));
        goto unmap_input_error;
      }

      flxdec->converter =
          flx_colorspace_converter_new (flxh->width, flxh->height);

      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dx : %d", flxh->aspect_dx);
        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dy : %d", flxh->aspect_dy);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe1 : 0x%08x", flxh->oframe1);
        GST_INFO_OBJECT (flxdec, "(FLC) oframe2 : 0x%08x", flxh->oframe2);
      }

      flxdec->size = ((guint) flxh->width * (guint) flxh->height);
      if (flxdec->size >= G_MAXSIZE / 4) {
        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
            ("%s", "Cannot allocate required memory"), (NULL));
        goto unmap_input_error;
      }

      /* create delta and output frame */
      flxdec->frame_data = g_malloc0 (flxdec->size);
      flxdec->delta_data = g_malloc0 (flxdec->size);

      flxdec->state = GST_FLXDEC_PLAYING;
    }
  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
    GstBuffer *out;

    /* while we have enough data in the adapter */
    while (available >= FlxFrameChunkSize && res == GST_FLOW_OK) {
      guint32 size;
      guint16 type;

      if (!gst_byte_reader_get_uint32_le (&reader, &size))
        goto parse_error;
      if (available < size)
        goto need_more_data;

      available -= size;
      gst_adapter_flush (flxdec->adapter, size);

      if (!gst_byte_reader_get_uint16_le (&reader, &type))
        goto parse_error;

      switch (type) {
        case FLX_FRAME_TYPE:{
          GstByteReader chunks;
          GstByteWriter writer;
          guint16 n_chunks;
          GstMapInfo map;

          GST_LOG_OBJECT (flxdec, "Have frame type 0x%02x of size %d", type,
              size);

          if (!gst_byte_reader_get_sub_reader (&reader, &chunks,
                  size - FlxFrameChunkSize))
            goto parse_error;

          if (!gst_byte_reader_get_uint16_le (&chunks, &n_chunks))
            goto parse_error;
          GST_LOG_OBJECT (flxdec, "Have %d chunks", n_chunks);

          if (n_chunks == 0)
            break;

          if (!gst_byte_reader_skip (&chunks, 8))       /* reserved */
            goto parse_error;

          gst_byte_writer_init_with_data (&writer, flxdec->frame_data,
              flxdec->size, TRUE);

          /* decode chunks */
          if (!flx_decode_chunks (flxdec, n_chunks, &chunks, &writer)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not decode chunk"), (NULL));
            goto unmap_input_error;
          }
          gst_byte_writer_reset (&writer);

          /* save copy of the current frame for possible delta. */
          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);

          out = gst_buffer_new_and_alloc (flxdec->size * 4);
          if (!gst_buffer_map (out, &map, GST_MAP_WRITE)) {
            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
                ("%s", "Could not map output buffer"), (NULL));
            gst_buffer_unref (out);
            goto unmap_input_error;
          }

          /* convert current frame. */
          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
              map.data);
          gst_buffer_unmap (out, &map);

          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
          flxdec->next_time += flxdec->frame_time;

          res = gst_pad_push (flxdec->srcpad, out);
          break;
        }
        default:
          GST_DEBUG_OBJECT (flxdec, "Unknown frame type 0x%02x, skipping %d",
              type, size);
          if (!gst_byte_reader_skip (&reader, size - FlxFrameChunkSize))
            goto parse_error;
          break;
      }
    }
  }

need_more_data:
  gst_buffer_unmap (input, &map_info);
  gst_buffer_unref (input);
  return res;

  /* ERRORS */
parse_error:
  GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
      ("%s", "Failed to parse stream"), (NULL));
unmap_input_error:
  gst_buffer_unmap (input, &map_info);
error:
  gst_buffer_unref (input);
  return GST_FLOW_ERROR;
}
static GstFlowReturn
gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoParse *base_video_parse;
  GstBaseVideoParseClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain with %d bytes", GST_BUFFER_SIZE (buf));

  base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad));
  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  if (!base_video_parse->started) {
    klass->start (base_video_parse);
    base_video_parse->started = TRUE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer");
    gst_base_video_parse_reset (base_video_parse);
    base_video_parse->discont = TRUE;
    base_video_parse->have_sync = FALSE;
  }

  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }

  gst_adapter_push (base_video_parse->input_adapter, buf);

  if (!base_video_parse->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_parse->input_adapter);
    m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n);

    gst_adapter_flush (base_video_parse->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);
      /* this is only "maybe" sync */
      base_video_parse->have_sync = TRUE;
    }
    if (!base_video_parse->have_sync) {
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_parse->input_adapter);
  base_video_parse->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  gst_buffer_unref (buffer);

  /* FIXME check klass->parse_data */
  do {
    ret = klass->parse_data (base_video_parse, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) {
    return GST_FLOW_OK;
  }
  return ret;
}
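/* A hypothetical scan_for_sync implementation (sketch only, not from the
 * original source): look for an MPEG-style 0x000001 start code with
 * gst_adapter_masked_scan_uint32() and report how many bytes can safely be
 * flushed before the (possible) sync point, as the chain functions above
 * expect. The start-code pattern and the function name are assumptions made
 * for the example. */
static int
example_scan_for_sync (GstAdapter * adapter, gboolean at_eos,
    int offset, int n)
{
  gssize pos;

  (void) at_eos;                /* not needed for this simple sketch */

  if (n < 4)
    return 0;                   /* not enough data to match a 4-byte pattern */

  pos = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      offset, n - offset);
  if (pos < 0)
    return n - 3;               /* no sync found; keep a possible partial code */

  return (int) pos;             /* bytes to flush before the sync word */
}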