Example #1
static GstFlowReturn
gst_rsvg_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  gboolean completed = FALSE;
  gboolean found = FALSE;
  const guint8 *data;
  guint size;
  guint i;

  GST_LOG_OBJECT (rsvg, "parse start");
  size = gst_adapter_available (adapter);

  /* "<svg></svg>" */
  if (size < 5 + 6)
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  data = gst_adapter_map (adapter, size);
  if (data == NULL) {
    GST_ERROR_OBJECT (rsvg, "Unable to map memory");
    return GST_FLOW_ERROR;
  }
  for (i = 0; i < size - 4; i++) {
    if (memcmp (data + i, "<svg", 4) == 0) {
      gst_adapter_flush (adapter, i);

      size = gst_adapter_available (adapter);
      if (size < 5 + 6)
        return GST_VIDEO_DECODER_FLOW_NEED_DATA;
      data = gst_adapter_map (adapter, size);
      if (data == NULL) {
        GST_ERROR_OBJECT (rsvg, "Unable to map memory");
        return GST_FLOW_ERROR;
      }
      break;
    }
  }
  /* If the start tag wasn't found, keep the last 4 bytes: they could
   * contain the beginning of a "<svg" split across input buffers. */
  if (!found) {
    gst_adapter_flush (adapter, size - 4);
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;
  }

  for (i = size - 6; i >= 5; i--) {
    if (memcmp (data + i, "</svg>", 6) == 0) {
      completed = TRUE;
      size = i + 6;
      break;
    }
  }

  if (completed) {

    GST_LOG_OBJECT (rsvg, "have complete svg of %u bytes", size);

    gst_video_decoder_add_to_frame (decoder, size);
    return gst_video_decoder_have_frame (decoder);
  }
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;
}
Example #2
static gboolean
gst_wavpack_parse_resync_adapter (GstAdapter * adapter)
{
  const guint8 *buf, *marker;

  guint avail = gst_adapter_available (adapter);

  if (avail < 4)
    return FALSE;

  /* if the marker is at the beginning don't do the expensive search */
  buf = gst_adapter_peek (adapter, 4);
  if (memcmp (buf, "wvpk", 4) == 0)
    return TRUE;

  if (avail == 4)
    return FALSE;

  /* search for the marker in the complete content of the adapter */
  buf = gst_adapter_peek (adapter, avail);
  if (buf && (marker = gst_wavpack_parse_find_marker ((guint8 *) buf, avail))) {
    gst_adapter_flush (adapter, marker - buf);
    return TRUE;
  }

  /* flush everything except the last 4 bytes. they could contain
   * the start of a new marker */
  gst_adapter_flush (adapter, avail - 4);

  return FALSE;
}
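Example #2 is an instance of a general resync idiom: scan the buffered bytes for an N-byte sync word, flush up to the match if one is found, and otherwise flush everything except a small tail that might hold the start of a marker split across input buffers. A minimal sketch for a hypothetical 4-byte marker (resync_to_marker() is not part of any real element):

/* Sketch: resync an adapter to a caller-supplied 4-byte marker. */
static gboolean
resync_to_marker (GstAdapter * adapter, const guint8 marker[4])
{
  guint avail = gst_adapter_available (adapter);
  const guint8 *data;
  guint i;

  if (avail < 4)
    return FALSE;

  data = gst_adapter_peek (adapter, avail);
  for (i = 0; i <= avail - 4; i++) {
    if (memcmp (data + i, marker, 4) == 0) {
      gst_adapter_flush (adapter, i);   /* drop the junk before the marker */
      return TRUE;
    }
  }

  /* keep the last 3 bytes: they may be the start of a split marker */
  gst_adapter_flush (adapter, avail - 3);
  return FALSE;
}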
Example #3
static GstFlowReturn gst_pocketsphinx_render (GstBaseSink * asink, GstBuffer * buffer)
{
    GstPocketSphinx *sphinxsink = GST_POCKETSPHINX (asink);

    if (!sphinxsink->ad.initialized) {
        g_signal_emit (sphinxsink,
                       gst_pocketsphinx_signals[SIGNAL_INITIALIZATION], 0, NULL);
        /* We initialized the cmd_ln module earlier. */
        fbs_init(0, NULL);
        g_signal_emit (sphinxsink,
                       gst_pocketsphinx_signals[SIGNAL_AFTER_INITIALIZATION], 0, NULL);
        sphinxsink->ad.initialized = TRUE;
    }

    gst_buffer_ref (buffer);
    gst_adapter_push (sphinxsink->adapter, buffer);
  
    while (gst_adapter_available (sphinxsink->adapter) >= REQUIRED_FRAME_BYTES) {
        if (sphinxsink->ad.calibrated) {
            gst_pocketsphinx_process_chunk (sphinxsink);
        } else {
            gst_pocketsphinx_calibrate_chunk (sphinxsink);
        }
        gst_adapter_flush (sphinxsink->adapter, REQUIRED_FRAME_BYTES);
    }

    return GST_FLOW_OK;
}
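Example #3 shows the canonical GstAdapter consumption loop: push each incoming buffer, then drain fixed-size chunks while enough bytes are buffered, flushing after each chunk. A minimal generic sketch, assuming a hypothetical CHUNK_SIZE and process() consumer (GStreamer 1.0 map/unmap API):

#define CHUNK_SIZE 4096   /* hypothetical fixed chunk size */

static void
handle_buffer (GstAdapter * adapter, GstBuffer * buffer)
{
  /* the adapter takes ownership, so take an extra ref like example #3 does */
  gst_adapter_push (adapter, gst_buffer_ref (buffer));

  while (gst_adapter_available (adapter) >= CHUNK_SIZE) {
    const guint8 *data = gst_adapter_map (adapter, CHUNK_SIZE);

    process (data, CHUNK_SIZE);               /* hypothetical consumer */
    gst_adapter_unmap (adapter);
    gst_adapter_flush (adapter, CHUNK_SIZE);  /* discard the consumed bytes */
  }
}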
Example #4
static GstFlowReturn
gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint8 *payload;
  guint avail;

  avail = gst_adapter_available (pay->adapter);

  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);
  payload = gst_rtp_buffer_get_payload (outbuf);

  GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
  GST_BUFFER_DURATION (outbuf) = pay->duration;

  /* copy G723 data as payload */
  gst_adapter_copy (pay->adapter, payload, 0, avail);

  /* flush bytes from adapter */
  gst_adapter_flush (pay->adapter, avail);
  pay->timestamp = GST_CLOCK_TIME_NONE;
  pay->duration = 0;

  /* set discont and marker */
  if (pay->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    gst_rtp_buffer_set_marker (outbuf, TRUE);
    pay->discont = FALSE;
  }

  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (pay), outbuf);

  return ret;
}
Example #5
static gboolean
gst_droidadec_stop (GstAudioDecoder * decoder)
{
  GstDroidADec *dec = GST_DROIDADEC (decoder);

  GST_DEBUG_OBJECT (dec, "stop");

  if (dec->codec) {
    droid_media_codec_stop (dec->codec);
    droid_media_codec_destroy (dec->codec);
    dec->codec = NULL;
  }

  gst_adapter_flush (dec->adapter, gst_adapter_available (dec->adapter));

  g_mutex_lock (&dec->eos_lock);
  dec->eos = FALSE;
  g_mutex_unlock (&dec->eos_lock);

  gst_buffer_replace (&dec->codec_data, NULL);

  if (dec->codec_type) {
    gst_droid_codec_unref (dec->codec_type);
    dec->codec_type = NULL;
  }

  return TRUE;
}
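Flushing gst_adapter_available() bytes, as gst_droidadec_stop() does, empties the adapter completely; gst_adapter_clear() is the shorter equivalent. A one-line comparison (sketch):

gst_adapter_flush (dec->adapter, gst_adapter_available (dec->adapter));
gst_adapter_clear (dec->adapter);   /* equivalent: drops all buffered data */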
Example #6
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail;
  guint8 *payload;
  GstFlowReturn ret;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);
  if (avail == 0)
    return GST_FLOW_OK;
  outbuf = gst_rtp_buffer_new_allocate (avail, 0, 0);

  /* get payload */
  payload = gst_rtp_buffer_get_payload (outbuf);

  /* copy stuff from adapter to payload */
  gst_adapter_copy (rtpmp2tpay->adapter, payload, 0, avail);

  GST_BUFFER_TIMESTAMP (outbuf) = rtpmp2tpay->first_ts;
  GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

  GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %d",
      GST_BUFFER_SIZE (outbuf));

  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp2tpay), outbuf);

  /* flush the adapter content */
  gst_adapter_flush (rtpmp2tpay->adapter, avail);

  return ret;
}
Example #7
static int
gst_ffmpeg_pipe_read (URLContext * h, unsigned char *buf, int size)
{
    GstFFMpegPipe *ffpipe;
    guint available;

    ffpipe = (GstFFMpegPipe *) h->priv_data;

    GST_LOG ("requested size %d", size);

    GST_FFMPEG_PIPE_MUTEX_LOCK (ffpipe);

    GST_LOG ("requested size %d", size);

    while ((available = gst_adapter_available (ffpipe->adapter)) < size
            && !ffpipe->eos) {
        GST_DEBUG ("Available:%d, requested:%d", available, size);
        ffpipe->needed = size;
        GST_FFMPEG_PIPE_SIGNAL (ffpipe);
        GST_FFMPEG_PIPE_WAIT (ffpipe);
    }

    size = MIN (available, size);
    if (size) {
        GST_LOG ("Getting %d bytes", size);
        gst_adapter_copy (ffpipe->adapter, buf, 0, size);
        gst_adapter_flush (ffpipe->adapter, size);
        GST_LOG ("%d bytes left in adapter",
                 gst_adapter_available (ffpipe->adapter));
        ffpipe->needed = 0;
    }
    GST_FFMPEG_PIPE_MUTEX_UNLOCK (ffpipe);

    return size;
}
Example #8
static GstFlowReturn
gst_inter_audio_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);
  guint n, bpf;
  guint64 period_time, buffer_time;
  guint64 period_samples, buffer_samples;

  GST_DEBUG_OBJECT (interaudiosink, "render %" G_GSIZE_FORMAT,
      gst_buffer_get_size (buffer));
  bpf = interaudiosink->info.bpf;

  g_mutex_lock (&interaudiosink->surface->mutex);

  buffer_time = interaudiosink->surface->audio_buffer_time;
  period_time = interaudiosink->surface->audio_period_time;

  if (buffer_time < period_time) {
    GST_ERROR_OBJECT (interaudiosink,
        "Buffer time smaller than period time (%" GST_TIME_FORMAT " < %"
        GST_TIME_FORMAT ")", GST_TIME_ARGS (buffer_time),
        GST_TIME_ARGS (period_time));
    g_mutex_unlock (&interaudiosink->surface->mutex);
    return GST_FLOW_ERROR;
  }

  buffer_samples =
      gst_util_uint64_scale (buffer_time, interaudiosink->info.rate,
      GST_SECOND);
  period_samples =
      gst_util_uint64_scale (period_time, interaudiosink->info.rate,
      GST_SECOND);

  n = gst_adapter_available (interaudiosink->surface->audio_adapter) / bpf;
  while (n > buffer_samples) {
    GST_DEBUG_OBJECT (interaudiosink, "flushing %" GST_TIME_FORMAT,
        GST_TIME_ARGS (period_time));
    gst_adapter_flush (interaudiosink->surface->audio_adapter,
        period_samples * bpf);
    n -= period_samples;
  }

  n = gst_adapter_available (interaudiosink->input_adapter);
  if (period_samples * bpf > gst_buffer_get_size (buffer) + n) {
    gst_adapter_push (interaudiosink->input_adapter, gst_buffer_ref (buffer));
  } else {
    GstBuffer *tmp;

    if (n > 0) {
      tmp = gst_adapter_take_buffer (interaudiosink->input_adapter, n);
      gst_adapter_push (interaudiosink->surface->audio_adapter, tmp);
    }
    gst_adapter_push (interaudiosink->surface->audio_adapter,
        gst_buffer_ref (buffer));
  }
  g_mutex_unlock (&interaudiosink->surface->mutex);

  return GST_FLOW_OK;
}
Example #9
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay, GstClockTime timestamp,
    GstClockTime duration)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;
  guint8 *payload;
  gint packet_size;
  gint payload_size;

  avail = gst_adapter_available (rtpmpvpay->adapter);
  packet_size = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  /* check for the maximum size of the rtp buffer */
  if (packet_size > GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay)) {
    payload_size =
        GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay) -
        gst_rtp_buffer_calc_packet_len (4, 0, 0);
  } else {
    payload_size = avail;
  }
  outbuf = gst_rtp_buffer_new_allocate (4 + payload_size, 0, 0);
  /* enable MPEG Video-specific header
   *
   *  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   *                                  AN              FBV     FFV
   */
  payload = gst_rtp_buffer_get_payload (outbuf);
  /* fill in the MPEG Video-specific header */
  memset (payload, 0x0, 4);
  /* copy stuff from adapter to payload */
  gst_adapter_copy (rtpmpvpay->adapter, payload + 4, 0, payload_size);
  GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;
  GST_BUFFER_DURATION (outbuf) = rtpmpvpay->duration;

  GST_DEBUG_OBJECT (rtpmpvpay, "pushing buffer of size %d",
      GST_BUFFER_SIZE (outbuf));
  ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
  gst_adapter_flush (rtpmpvpay->adapter, payload_size);

  /* update the timestamp and duration */
  rtpmpvpay->first_ts = timestamp;
  rtpmpvpay->duration = duration;

  /* check if there is enough data for another rtp buffer */
  avail = gst_adapter_available (rtpmpvpay->adapter);
  packet_size = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

  if (packet_size >= GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay) && ret == GST_FLOW_OK) {
    GST_DEBUG_OBJECT (rtpmpvpay, "Have enough data for another rtp packet");
    ret = gst_rtp_mpv_pay_flush (rtpmpvpay, timestamp, duration);
  }
  return ret;
}
Example #10
static void
chunk_flush (GstAdapter *adapter, Chunk *chunk)
{
  g_return_if_fail (chunk->available == chunk->length);
  
  gst_adapter_flush (adapter, chunk->length);
  chunk_clear (chunk);
}
Example #11
/* Take nbytes out of the adapter as a newly allocated copy
 * (simplified sketch of the real gst_adapter_take()). */
guint8 *
gst_adapter_take (GstAdapter * adapter, guint nbytes)
{
  const guint8 *cdata;
  guint8 *data;

  cdata = gst_adapter_peek (adapter, nbytes);
  data = g_malloc (nbytes);
  memcpy (data, cdata, nbytes);
  gst_adapter_flush (adapter, nbytes);

  return data;
}
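From the caller's side, the important detail is that gst_adapter_take() returns a newly allocated copy that must be released with g_free(). A minimal usage sketch (FRAME_SIZE is a hypothetical fixed frame size):

#define FRAME_SIZE 1024   /* hypothetical */

static void
drain_frames (GstAdapter * adapter)
{
  while (gst_adapter_available (adapter) >= FRAME_SIZE) {
    guint8 *frame = gst_adapter_take (adapter, FRAME_SIZE);

    /* ... process the frame ... */
    g_free (frame);   /* the caller owns the returned copy */
  }
}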
Example #12
static gboolean
theora_enc_read_multipass_cache (GstTheoraEnc * enc)
{
    GstBuffer *cache_buf;
    const guint8 *cache_data;
    gsize bytes_read = 0;
    gssize bytes_consumed = 0;
    GIOStatus stat = G_IO_STATUS_NORMAL;
    gboolean done = FALSE;

    while (!done) {
        if (gst_adapter_available (enc->multipass_cache_adapter) == 0) {
            GstMapInfo minfo;

            cache_buf = gst_buffer_new_allocate (NULL, 512, NULL);

            gst_buffer_map (cache_buf, &minfo, GST_MAP_WRITE);

            stat = g_io_channel_read_chars (enc->multipass_cache_fd,
                                            (gchar *) minfo.data, minfo.size, &bytes_read, NULL);

            if (bytes_read <= 0) {
                gst_buffer_unmap (cache_buf, &minfo);
                gst_buffer_unref (cache_buf);
                break;
            } else {
                gst_buffer_unmap (cache_buf, &minfo);
                gst_buffer_resize (cache_buf, 0, bytes_read);

                gst_adapter_push (enc->multipass_cache_adapter, cache_buf);
            }
        }
        if (gst_adapter_available (enc->multipass_cache_adapter) == 0)
            break;

        bytes_read =
            MIN (gst_adapter_available (enc->multipass_cache_adapter), 512);

        cache_data = gst_adapter_map (enc->multipass_cache_adapter, bytes_read);

        bytes_consumed =
            th_encode_ctl (enc->encoder, TH_ENCCTL_2PASS_IN, (guint8 *) cache_data,
                           bytes_read);
        gst_adapter_unmap (enc->multipass_cache_adapter);

        done = bytes_consumed <= 0;
        if (bytes_consumed > 0)
            gst_adapter_flush (enc->multipass_cache_adapter, bytes_consumed);
    }

    if (stat == G_IO_STATUS_ERROR || (stat == G_IO_STATUS_EOF && bytes_read == 0)
            || bytes_consumed < 0) {
        GST_ELEMENT_ERROR (enc, RESOURCE, READ, (NULL),
                           ("Failed to read multipass cache file"));
        return FALSE;
    }
    return TRUE;
}
Example #13
static int mpegts_demuxer_read_packet(void *opaque, uint8_t *buffer, int size)
{
    MpegTSDemuxer *demuxer = MPEGTS_DEMUXER(opaque);
    int result = 0;

    g_mutex_lock(&demuxer->lock);
    gint available = gst_adapter_available(demuxer->sink_adapter);
    while (available < demuxer->offset + size &&
           !demuxer->is_eos && !demuxer->is_flushing && demuxer->is_reading)
    {
        if (demuxer->adapter_limit_type == UNLIMITED &&
            demuxer->adapter_limit_size - LIMIT_STEP < demuxer->offset + size)
        {
            demuxer->adapter_limit_size += LIMIT_STEP;
            g_cond_signal(&demuxer->del_cond);
        }
        else
            g_cond_wait(&demuxer->add_cond, &demuxer->lock);

        available = gst_adapter_available(demuxer->sink_adapter);
    }

    if (demuxer->is_reading && !demuxer->is_flushing)
    {
        if (demuxer->is_eos && available <= size) {
            demuxer->is_last_buffer_send = TRUE; // Last buffer
            size = available;
        }

        if (size > 0)
        {
            gst_adapter_copy(demuxer->sink_adapter, buffer, demuxer->offset, size);
            if (demuxer->flush_adapter)
                gst_adapter_flush(demuxer->sink_adapter, size);
            else
                demuxer->offset += size;

            g_cond_signal(&demuxer->del_cond);
            result = size;

#ifdef FAKE_ERROR
            demuxer->read_bytes += size;
#endif
        }
    }
    else
        result = 0; // No more data

    g_mutex_unlock(&demuxer->lock);

#ifdef DEBUG_OUTPUT
    if (result <= 0)
        g_print("MpegTS: read_packet result = %d, is_eos=%s, is_reading=%s, is_flushing=%s\n",
                result, BV(demuxer->is_eos), BV(demuxer->is_reading), BV(demuxer->is_flushing));
#endif
    return result;
}
Example #14
static GstFlowReturn
gst_real_audio_demux_parse_data (GstRealAudioDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail, unit_size;

  avail = gst_adapter_available (demux->adapter);

  if (demux->packet_size > 0)
    unit_size = demux->packet_size;
  else
    unit_size = avail & 0xfffffff0;     /* round down to next multiple of 16 */

  GST_LOG_OBJECT (demux, "available = %u, unit_size = %u", avail, unit_size);

  while (ret == GST_FLOW_OK && unit_size > 0 && avail >= unit_size) {
    GstClockTime ts;
    const guint8 *data;
    GstBuffer *buf = NULL;

    buf = gst_buffer_new_and_alloc (unit_size);
    gst_buffer_set_caps (buf, GST_PAD_CAPS (demux->srcpad));

    data = gst_adapter_peek (demux->adapter, unit_size);
    memcpy (GST_BUFFER_DATA (buf), data, unit_size);
    gst_adapter_flush (demux->adapter, unit_size);
    avail -= unit_size;

    if (demux->need_newsegment) {
      gst_pad_push_event (demux->srcpad,
          gst_event_new_new_segment_full (FALSE, demux->segment.rate,
              demux->segment.applied_rate, GST_FORMAT_TIME,
              demux->segment.start, demux->segment.stop, demux->segment.time));
      demux->need_newsegment = FALSE;
    }

    if (demux->pending_tags) {
      gst_element_found_tags_for_pad (GST_ELEMENT (demux), demux->srcpad,
          demux->pending_tags);
      demux->pending_tags = NULL;
    }

    if (demux->fourcc == GST_RM_AUD_DNET) {
      buf = gst_rm_utils_descramble_dnet_buffer (buf);
    }

    ts = gst_real_demux_get_timestamp_from_offset (demux, demux->offset);
    GST_BUFFER_TIMESTAMP (buf) = ts;

    gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME, ts);

    ret = gst_pad_push (demux->srcpad, buf);
  }

  return ret;
}
Example #15
static GstFlowReturn
gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint available;
  guint max_payload;
  GstBuffer *outbuf;
  guint8 *payload_data;
  guint frame_count;
  guint payload_length;
  struct rtp_payload *payload;

  if (sbcpay->frame_length == 0) {
    GST_ERROR_OBJECT (sbcpay, "Frame length is 0");
    return GST_FLOW_ERROR;
  }

  available = gst_adapter_available (sbcpay->adapter);

  max_payload =
      gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (sbcpay) -
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  max_payload = MIN (max_payload, available);
  frame_count = max_payload / sbcpay->frame_length;
  payload_length = frame_count * sbcpay->frame_length;
  if (payload_length == 0)      /* Nothing to send */
    return GST_FLOW_OK;

  outbuf = gst_rtp_buffer_new_allocate (payload_length +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  /* get payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (sbcpay));

  /* write header and copy data into payload */
  payload_data = gst_rtp_buffer_get_payload (&rtp);
  payload = (struct rtp_payload *) payload_data;
  memset (payload, 0, sizeof (struct rtp_payload));
  payload->frame_count = frame_count;

  gst_adapter_copy (sbcpay->adapter, payload_data +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);

  gst_rtp_buffer_unmap (&rtp);

  gst_adapter_flush (sbcpay->adapter, payload_length);

  /* FIXME: what about duration? */
  GST_BUFFER_PTS (outbuf) = sbcpay->timestamp;
  GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes", payload_length);

  return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (sbcpay), outbuf);
}
Example #16
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;

  guint8 *payload;

  avail = gst_adapter_available (rtpmpvpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint packet_len;
    guint payload_len;

    packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);

    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay));

    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 4, 0);

    outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);

    payload = gst_rtp_buffer_get_payload (outbuf);
    /* enable MPEG Video-specific header
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *                                  AN              FBV     FFV
     */

    /* fill in the MPEG Video-specific header 
     * data is set to 0x0 here
     */
    memset (payload, 0x0, 4);

    gst_adapter_copy (rtpmpvpay->adapter, payload + 4, 0, payload_len);
    gst_adapter_flush (rtpmpvpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
  }

  return ret;
}
Example #17
static MetadataParsingReturn
metadataparse_jpeg_iptc (JpegParseData * jpeg_data, guint8 ** buf,
    guint32 * bufsize, guint8 ** next_start, guint32 * next_size)
{

  int ret;

  ret = metadataparse_util_hold_chunk (&jpeg_data->read, buf,
      bufsize, next_start, next_size, jpeg_data->iptc_adapter);


  if (ret == META_PARSING_DONE) {

    const guint8 *buf;
    guint32 size;
    unsigned int iptc_len;
    int res;

    jpeg_data->state = JPEG_PARSE_READING;

    size = gst_adapter_available (*jpeg_data->iptc_adapter);
    buf = gst_adapter_peek (*jpeg_data->iptc_adapter, size);

    /* FIXME: currently we are throwing away others PhotoShop data */
    res = iptc_jpeg_ps3_find_iptc (buf, size, &iptc_len);

    if (res < 0) {
      /* error */
      ret = META_PARSING_ERROR;
    } else if (res == 0) {
      /* no iptc data found */
      gst_adapter_clear (*jpeg_data->iptc_adapter);
    } else {
      gst_adapter_flush (*jpeg_data->iptc_adapter, res);
      size = gst_adapter_available (*jpeg_data->iptc_adapter);
      if (size > iptc_len) {
        GstBuffer *buf;

        buf = gst_adapter_take_buffer (*jpeg_data->iptc_adapter, iptc_len);
        gst_adapter_clear (*jpeg_data->iptc_adapter);
        gst_adapter_push (*jpeg_data->iptc_adapter, buf);
      }
    }

    /* if there is a second Iptc chunk in the file it will be jumped */
    jpeg_data->iptc_adapter = NULL;

  }

  return ret;

}
Example #18
void
gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
{
  g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder));

  GST_DEBUG ("lost_sync");

  if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) {
    gst_adapter_flush (base_video_decoder->input_adapter, 1);
  }

  base_video_decoder->have_sync = FALSE;
}
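lost_sync() only makes sense inside a retry loop: the caller scans for a sync pattern and, on failure, discards a single byte and scans again from the new position. A sketch of such a driver, where scan_for_sync() and SYNC_LEN are assumptions, not part of the real base class:

#define SYNC_LEN 4   /* hypothetical sync-word length */

static void
resync_loop (GstBaseVideoDecoder * dec)
{
  while (!dec->have_sync &&
      gst_adapter_available (dec->input_adapter) >= SYNC_LEN) {
    if (scan_for_sync (dec))                    /* hypothetical scanner */
      dec->have_sync = TRUE;
    else
      gst_base_video_decoder_lost_sync (dec);   /* flushes exactly one byte */
  }
}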
Example #19
void
gst_base_video_parse_lost_sync (GstBaseVideoParse * base_video_parse)
{
  g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse));

  GST_DEBUG ("lost_sync");

  if (gst_adapter_available (base_video_parse->input_adapter) >= 1) {
    gst_adapter_flush (base_video_parse->input_adapter, 1);
  }

  base_video_parse->have_sync = FALSE;
}
Example #20
static void copyGStreamerBuffersToAudioChannel(GstAdapter* adapter, AudioBus* bus, int channelNumber, size_t framesToProcess)
{
    if (!gst_adapter_available(adapter)) {
        bus->zero();
        return;
    }

    size_t bytes = framesToProcess * sizeof(float);
    if (gst_adapter_available(adapter) >= bytes) {
        gst_adapter_copy(adapter, bus->channel(channelNumber)->mutableData(), 0, bytes);
        gst_adapter_flush(adapter, bytes);
    }
}
Example #21
File: gstsbcenc.c  Project: Bisheg/bluez
static GstFlowReturn sbc_enc_chain(GstPad *pad, GstBuffer *buffer)
{
	GstSbcEnc *enc = GST_SBC_ENC(gst_pad_get_parent(pad));
	GstAdapter *adapter = enc->adapter;
	GstFlowReturn res = GST_FLOW_OK;

	gst_adapter_push(adapter, buffer);

	while (gst_adapter_available(adapter) >= enc->codesize &&
							res == GST_FLOW_OK) {
		GstBuffer *output;
		GstCaps *caps;
		const guint8 *data;
		gint consumed;

		caps = GST_PAD_CAPS(enc->srcpad);
		res = gst_pad_alloc_buffer_and_set_caps(enc->srcpad,
						GST_BUFFER_OFFSET_NONE,
						enc->frame_length, caps,
						&output);
		if (res != GST_FLOW_OK)
			goto done;

		data = gst_adapter_peek(adapter, enc->codesize);

		consumed = sbc_encode(&enc->sbc, (gpointer) data,
					enc->codesize,
					GST_BUFFER_DATA(output),
					GST_BUFFER_SIZE(output), NULL);
		if (consumed <= 0) {
			GST_DEBUG_OBJECT(enc, "comsumed < 0, codesize: %d",
					enc->codesize);
			break;
		}
		gst_adapter_flush(adapter, consumed);

		GST_BUFFER_TIMESTAMP(output) = GST_BUFFER_TIMESTAMP(buffer);
		/* we have only 1 frame */
		GST_BUFFER_DURATION(output) = enc->frame_duration;

		res = gst_pad_push(enc->srcpad, output);

		if (res != GST_FLOW_OK)
			goto done;
	}

done:
	gst_object_unref(enc);

	return res;
}
Example #22
static GstFlowReturn
gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  /* the data available in the adapter is either smaller
   * than the MTU or bigger. In the case it is smaller, the complete
   * adapter contents can be put in one packet. In the case the
   * adapter has more than one MTU, we need to split the MP4V data
   * over multiple packets. */
  avail = gst_adapter_available (rtpmp4vpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;

    /* this will be the total lenght of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmp4vpay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);

    gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
    gst_adapter_flush (rtpmp4vpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
  }

  return ret;
}
Example #23
long
AudioStream::read_bytes (void *data, long length)
{ 
  gssize len = gst_adapter_available (adapter);

  if (len < length)
    length = len;

  GST_LOG ("Reading %d bytes from adapter", (int) length);
  gst_adapter_copy (adapter, data, 0, length);
  gst_adapter_flush (adapter, length);

  return length;
}
Example #24
static void
chunk_skip (GstAdapter *adapter, Chunk *chunk, guint size)
{
  g_return_if_fail (size <= chunk->available);
  chunk->length -= size;
  chunk->available -= size;
  gst_adapter_flush (adapter, size);
  if (chunk->available) {
    chunk->data = gst_adapter_peek (adapter, chunk->available);
    g_assert (chunk->data);
  } else {
    chunk->data = NULL;
  }
  GST_LOG ("skipped %u bytes, %u of %u still available\n", size, 
      chunk->available, chunk->length);
}
Example #25
File: gstrtpsbcpay.c  Project: Bisheg/bluez
static GstFlowReturn gst_rtp_sbc_pay_flush_buffers(GstRtpSBCPay *sbcpay)
{
	guint available;
	guint max_payload;
	GstBuffer *outbuf;
	guint8 *payload_data;
	guint frame_count;
	guint payload_length;
	struct rtp_payload *payload;

	if (sbcpay->frame_length == 0) {
		GST_ERROR_OBJECT(sbcpay, "Frame length is 0");
		return GST_FLOW_ERROR;
	}

	available = gst_adapter_available(sbcpay->adapter);

	max_payload = gst_rtp_buffer_calc_payload_len(
		GST_BASE_RTP_PAYLOAD_MTU(sbcpay) - RTP_SBC_PAYLOAD_HEADER_SIZE,
		0, 0);

	max_payload = MIN(max_payload, available);
	frame_count = max_payload / sbcpay->frame_length;
	payload_length = frame_count * sbcpay->frame_length;
	if (payload_length == 0) /* Nothing to send */
		return GST_FLOW_OK;

	outbuf = gst_rtp_buffer_new_allocate(payload_length +
			RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

	gst_rtp_buffer_set_payload_type(outbuf,
			GST_BASE_RTP_PAYLOAD_PT(sbcpay));

	payload_data = gst_rtp_buffer_get_payload(outbuf);
	payload = (struct rtp_payload *) payload_data;
	memset(payload, 0, sizeof(struct rtp_payload));
	payload->frame_count = frame_count;

	gst_adapter_copy(sbcpay->adapter, payload_data +
			RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);
	gst_adapter_flush(sbcpay->adapter, payload_length);

	GST_BUFFER_TIMESTAMP(outbuf) = sbcpay->timestamp;
	GST_DEBUG_OBJECT(sbcpay, "Pushing %d bytes", payload_length);

	return gst_basertppayload_push(GST_BASE_RTP_PAYLOAD(sbcpay), outbuf);
}
Example #26
static gboolean
get_next_chunk (GstAdapter *adapter, Chunk *chunk)
{
  const guint8 *data;
  guint length;
  
  data = gst_adapter_peek (adapter, 8);
  if (!data) return FALSE;
  length = GST_READ_UINT32_BE (data + 4);

  chunk->fourcc = GST_READ_UINT32_LE (data);
  chunk->length = length;
  gst_adapter_flush (adapter, 8);
  chunk->data = gst_adapter_peek (adapter, gst_adapter_available_fast (adapter));
  chunk->available = gst_adapter_available_fast (adapter);
  return TRUE;
}
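get_next_chunk() peeks only gst_adapter_available_fast() bytes: the contiguous head of the adapter that can be returned without merging buffers, as opposed to gst_adapter_available(), which counts everything queued. A small sketch of the difference:

static void
demo_available (void)
{
  GstAdapter *adapter = gst_adapter_new ();

  gst_adapter_push (adapter, gst_buffer_new_and_alloc (100));
  gst_adapter_push (adapter, gst_buffer_new_and_alloc (50));

  /* all queued bytes vs. the contiguous head only */
  g_print ("total: %u\n", (guint) gst_adapter_available (adapter));       /* 150 */
  g_print ("fast:  %u\n", (guint) gst_adapter_available_fast (adapter));  /* 100 */

  g_object_unref (adapter);
}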
Example #27
HRESULT
Output::RenderAudioSamples (bool preroll)
{
  uint32_t samplesWritten;

  // guint64 samplesToWrite;

  if (decklinksink->stop) {
    GST_DEBUG ("decklinksink->stop set TRUE!");
    decklinksink->output->BeginAudioPreroll ();
    // running = true;
  } else {
    gconstpointer data;
    int n;

    g_mutex_lock (&decklinksink->audio_mutex);

    n = gst_adapter_available (decklinksink->audio_adapter);
    if (n > 0) {
      data = gst_adapter_map (decklinksink->audio_adapter, n);

      decklinksink->output->ScheduleAudioSamples ((void *) data, n / 4,
          0, 0, &samplesWritten);

      gst_adapter_unmap (decklinksink->audio_adapter);
      gst_adapter_flush (decklinksink->audio_adapter, samplesWritten * 4);
      GST_DEBUG ("wrote %d samples, %d available", samplesWritten, n / 4);

      g_cond_signal (&decklinksink->audio_cond);
    } else {
      if (decklinksink->audio_eos) {
        GstMessage *message;

        message = gst_message_new_eos (GST_OBJECT_CAST (decklinksink));
        gst_message_set_seqnum (message, decklinksink->audio_seqnum);
        gst_element_post_message (GST_ELEMENT_CAST (decklinksink), message);
      }
    }
    g_mutex_unlock (&decklinksink->audio_mutex);

  }

  GST_DEBUG ("RenderAudioSamples");

  return S_OK;
}
Example #28
static gboolean
scope_parser_buffer_probe(GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
    ScopeParser *parser = (ScopeParser *)user_data;
    guint available = gst_adapter_available(parser->adapter);
    
    // this sucks
    if(available > SCOPE_SIZE) {
        gst_adapter_flush(parser->adapter, available - 30000);
    }
    
    if(buffer != NULL) {
        gst_buffer_ref(buffer);
        gst_adapter_push(parser->adapter, buffer);
    }
    
    return TRUE;
}
Example #29
static void
gst_aiff_parse_ignore_chunk (GstAiffParse * aiff, GstBuffer * buf, guint32 tag,
    guint32 size)
{
  guint flush;

  if (aiff->streaming) {
    if (!gst_aiff_parse_peek_chunk (aiff, &tag, &size))
      return;
  }
  GST_DEBUG_OBJECT (aiff, "Ignoring tag %" GST_FOURCC_FORMAT,
      GST_FOURCC_ARGS (tag));
  flush = 8 + ((size + 1) & ~1);
  aiff->offset += flush;
  if (aiff->streaming) {
    gst_adapter_flush (aiff->adapter, flush);
  } else {
    gst_buffer_unref (buf);
  }
}
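The flush arithmetic above reflects IFF padding: chunk bodies are rounded up to an even byte count, and the 8-byte chunk header is added on top. For example, size = 13 gives flush = 8 + ((13 + 1) & ~1) = 8 + 14 = 22 bytes.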
Example #30
static GstFlowReturn
gst_inter_audio_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);
  int n;

  GST_DEBUG ("render %d", GST_BUFFER_SIZE (buffer));

  g_mutex_lock (interaudiosink->surface->mutex);
  n = gst_adapter_available (interaudiosink->surface->audio_adapter) / 4;
  if (n > (800 * 2 * 2)) {
    GST_INFO ("flushing 800 samples");
    gst_adapter_flush (interaudiosink->surface->audio_adapter, 800 * 4);
    n -= 800;
  }
  gst_adapter_push (interaudiosink->surface->audio_adapter,
      gst_buffer_ref (buffer));
  g_mutex_unlock (interaudiosink->surface->mutex);

  return GST_FLOW_OK;
}