static GstFlowReturn
gst_rnd_buffer_size_drain_adapter (GstRndBufferSize * self, gboolean eos)
{
  GstFlowReturn flow;
  GstBuffer *buf;
  guint num_bytes, avail;

  flow = GST_FLOW_OK;

  if (G_UNLIKELY (self->min > self->max))
    goto bogus_minmax;

  do {
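    /* pick a size in [min, max): g_rand_int_range excludes the upper bound */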
    if (self->min != self->max) {
      num_bytes = g_rand_int_range (self->rand, self->min, self->max);
    } else {
      num_bytes = self->min;
    }

    GST_LOG_OBJECT (self, "pulling %u bytes out of adapter", num_bytes);

    buf = gst_adapter_take_buffer (self->adapter, num_bytes);

    if (buf == NULL) {
      if (!eos) {
        GST_LOG_OBJECT (self, "not enough bytes in adapter");
        break;
      }

      avail = gst_adapter_available (self->adapter);

      if (avail == 0)
        break;

      if (avail < self->min) {
        GST_WARNING_OBJECT (self, "discarding %u bytes at end (min=%u)",
            avail, self->min);
        gst_adapter_clear (self->adapter);
        break;
      }
      buf = gst_adapter_take_buffer (self->adapter, avail);
      g_assert (buf != NULL);
    }

    flow = gst_pad_push (self->srcpad, buf);
  }
  while (flow == GST_FLOW_OK);

  return flow;

/* ERRORS */
bogus_minmax:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,
        ("The minimum buffer size is greater than the maximum buffer size."),
        ("buffer sizes: min=%u, max=%u", self->min, self->max));
    return GST_FLOW_ERROR;
  }
}
static gboolean
gst_hls_demux_fetch_location (GstHLSDemux * demux, const gchar * uri)
{
  GstStateChangeReturn ret;
  gboolean bret = FALSE;

  g_mutex_lock (demux->fetcher_lock);

  while (demux->fetcher)
    g_cond_wait (demux->fetcher_cond, demux->fetcher_lock);

  if (demux->cancelled)
    goto quit;

  if (!gst_hls_demux_make_fetcher_locked (demux, uri)) {
    goto uri_error;
  }

  ret = gst_element_set_state (demux->fetcher, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto state_change_error;

  /* wait until we have fetched the uri */
  GST_DEBUG_OBJECT (demux, "Waiting to fetch the URI");
  g_cond_wait (demux->fetcher_cond, demux->fetcher_lock);

  gst_hls_demux_stop_fetcher_locked (demux, FALSE);

  if (!demux->fetcher_error && gst_adapter_available (demux->download)) {
    GST_INFO_OBJECT (demux, "URI fetched successfully");
    bret = TRUE;
  }
  goto quit;

uri_error:
  {
    GST_ELEMENT_ERROR (demux, RESOURCE, OPEN_READ,
        ("Could not create an element to fetch the given URI."), ("URI: \"%s\"",
            uri));
    bret = FALSE;
    goto quit;
  }

state_change_error:
  {
    GST_ELEMENT_ERROR (demux, CORE, STATE_CHANGE,
        ("Error changing state of the fetcher element."), (NULL));
    bret = FALSE;
    goto quit;
  }

quit:
  {
    /* Unlock any other fetcher that might be waiting */
    g_cond_broadcast (demux->fetcher_cond);
    g_mutex_unlock (demux->fetcher_lock);
    return bret;
  }
}
static GstFlowReturn
gst_raw_parse_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRawParse *rp = GST_RAW_PARSE (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstRawParseClass *rp_class = GST_RAW_PARSE_GET_CLASS (rp);
  guint buffersize;

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (rp, "received DISCONT buffer");
    gst_adapter_clear (rp->adapter);
    rp->discont = TRUE;
  }

  if (!gst_raw_parse_set_src_caps (rp))
    goto no_caps;

  gst_adapter_push (rp->adapter, buffer);

  if (rp_class->multiple_frames_per_buffer) {
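    /* take every complete frame currently in the adapter as one buffer */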
    buffersize = gst_adapter_available (rp->adapter);
    buffersize -= buffersize % rp->framesize;
  } else {
    buffersize = rp->framesize;
  }

  while (buffersize > 0 && gst_adapter_available (rp->adapter) >= buffersize) {
    buffer = gst_adapter_take_buffer (rp->adapter, buffersize);

    ret = gst_raw_parse_push_buffer (rp, buffer);
    if (ret != GST_FLOW_OK)
      break;
  }
done:
  gst_object_unref (rp);

  return ret;

  /* ERRORS */
no_caps:
  {
    GST_ERROR_OBJECT (rp, "could not set caps");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }
}
static gint
gst_vdp_h264_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
    GstAdapter * adapter)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);
  gint m;

  if (h264_dec->packetized)
    return 0;

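  /* look for an Annex B start code (00 00 01); if there is none, all but
   * the last few bytes can be skipped, since a start code may still be
   * split across the data boundary */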
  m = gst_adapter_masked_scan_uint32 (adapter, 0xffffff00, 0x00000100,
      0, gst_adapter_available (adapter));
  if (m == -1)
    return gst_adapter_available (adapter) - SYNC_CODE_SIZE;

  return m;
}
static GstFlowReturn
gst_rtp_mpv_pay_handle_buffer (GstBaseRTPPayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPMPVPay *rtpmpvpay;
  guint avail, packet_len;
  GstClockTime timestamp, duration;
  GstFlowReturn ret = GST_FLOW_OK;

  rtpmpvpay = GST_RTP_MPV_PAY (basepayload);

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);

  avail = gst_adapter_available (rtpmpvpay->adapter);

  if (!GST_CLOCK_TIME_IS_VALID (duration))
    duration = 0;

  if (!GST_CLOCK_TIME_IS_VALID (rtpmpvpay->first_ts) || avail == 0)
    rtpmpvpay->first_ts = timestamp;

  if (avail == 0) {
    rtpmpvpay->duration = duration;
  } else {
    rtpmpvpay->duration += duration;
  }

  gst_adapter_push (rtpmpvpay->adapter, buffer);
  avail = gst_adapter_available (rtpmpvpay->adapter);

  /* get packet length of previous data and this new data,
   * payload length includes a 4 byte MPEG video-specific header */
  packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
  GST_LOG_OBJECT (rtpmpvpay, "available %d, rtp packet length %d", avail,
      packet_len);

  if (gst_basertppayload_is_filled (basepayload,
          packet_len, rtpmpvpay->duration)) {
    ret = gst_rtp_mpv_pay_flush (rtpmpvpay);
  } else {
    rtpmpvpay->first_ts = timestamp;
  }

  return ret;
}
static gboolean
gst_jpeg_parse_sink_event (GstPad * pad, GstEvent * event)
{
  GstJpegParse *parse;
  gboolean res = TRUE;

  parse = GST_JPEG_PARSE (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (parse, "event : %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
      parse->priv->next_ts = GST_CLOCK_TIME_NONE;
      parse->priv->duration = GST_CLOCK_TIME_NONE;
      parse->priv->last_offset = 0;
      parse->priv->last_entropy_len = 0;
      parse->priv->last_resync = FALSE;
      gst_adapter_clear (parse->priv->adapter);
      break;
    case GST_EVENT_EOS:{
      /* Push the remaining data, even though it's incomplete */
      guint available = gst_adapter_available (parse->priv->adapter);
      if (available > 0)
        gst_jpeg_parse_push_buffer (parse, available);
      res = gst_pad_push_event (parse->priv->srcpad, event);
      break;
    }
    case GST_EVENT_NEWSEGMENT:
      /* Discard any data in the adapter.  There should have been an EOS before
       * to flush it. */
      gst_adapter_clear (parse->priv->adapter);
      res = gst_pad_push_event (parse->priv->srcpad, event);
      parse->priv->new_segment = TRUE;
      break;
    case GST_EVENT_TAG:{
      if (!parse->priv->new_segment)
        res = gst_pad_event_default (pad, event);
      else {
        GstTagList *taglist = NULL;

        gst_event_parse_tag (event, &taglist);
        /* Hold on to the tags till the srcpad caps are definitely set */
        gst_tag_list_insert (get_tag_list (parse), taglist,
            GST_TAG_MERGE_REPLACE);
        GST_DEBUG ("collected tags: %" GST_PTR_FORMAT, parse->priv->tags);
        gst_event_unref (event);
      }
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (parse);
  return res;
}
static gboolean
gst_aiffparse_peek_data (AIFFParse * aiff, guint32 size, const guint8 ** data)
{
  if (gst_adapter_available (aiff->adapter) < size)
    return FALSE;

  *data = gst_adapter_peek (aiff->adapter, size);
  return TRUE;
}
static GstFlowReturn
gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint available;
  guint max_payload;
  GstBuffer *outbuf;
  guint8 *payload_data;
  guint frame_count;
  guint payload_length;
  struct rtp_payload *payload;

  if (sbcpay->frame_length == 0) {
    GST_ERROR_OBJECT (sbcpay, "Frame length is 0");
    return GST_FLOW_ERROR;
  }

  available = gst_adapter_available (sbcpay->adapter);

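  /* largest RTP payload that fits in one MTU, minus the SBC payload header */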
  max_payload =
      gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (sbcpay) -
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  max_payload = MIN (max_payload, available);
  frame_count = max_payload / sbcpay->frame_length;
  payload_length = frame_count * sbcpay->frame_length;
  if (payload_length == 0)      /* Nothing to send */
    return GST_FLOW_OK;

  outbuf = gst_rtp_buffer_new_allocate (payload_length +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);

  /* get payload */
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

  gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (sbcpay));

  /* write header and copy data into payload */
  payload_data = gst_rtp_buffer_get_payload (&rtp);
  payload = (struct rtp_payload *) payload_data;
  memset (payload, 0, sizeof (struct rtp_payload));
  payload->frame_count = frame_count;

  gst_adapter_copy (sbcpay->adapter, payload_data +
      RTP_SBC_PAYLOAD_HEADER_SIZE, 0, payload_length);

  gst_rtp_buffer_unmap (&rtp);

  gst_adapter_flush (sbcpay->adapter, payload_length);

  /* FIXME: what about duration? */
  GST_BUFFER_PTS (outbuf) = sbcpay->timestamp;
  GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes", payload_length);

  return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (sbcpay), outbuf);
}
static GstFlowReturn
gst_wildmidi_parse_song (GstWildmidi * wildmidi)
{
  struct _WM_Info *info;
  GstCaps *outcaps;
  guint8 *data;
  guint size;

  GST_DEBUG_OBJECT (wildmidi, "Parsing song");

  size = gst_adapter_available (wildmidi->adapter);
  data = gst_adapter_take (wildmidi->adapter, size);

  /* this method takes our memory block */
  GST_OBJECT_LOCK (wildmidi);
  wildmidi->song = WildMidi_OpenBuffer (data, size);

  if (!wildmidi->song)
    goto open_failed;

  WildMidi_LoadSamples (wildmidi->song);

  WildMidi_SetOption (wildmidi->song, WM_MO_LINEAR_VOLUME,
      wildmidi->linear_volume);
  WildMidi_SetOption (wildmidi->song, WM_MO_EXPENSIVE_INTERPOLATION,
      wildmidi->high_quality);

  info = WildMidi_GetInfo (wildmidi->song);
  GST_OBJECT_UNLOCK (wildmidi);

  wildmidi->o_len = info->approx_total_samples;

  outcaps = gst_caps_copy (gst_pad_get_pad_template_caps (wildmidi->srcpad));
  gst_pad_set_caps (wildmidi->srcpad, outcaps);
  gst_caps_unref (outcaps);

  /* we keep an internal segment in samples */
  gst_segment_set_newsegment (wildmidi->o_segment, FALSE, 1.0,
      GST_FORMAT_DEFAULT, 0, GST_CLOCK_TIME_NONE, 0);

  gst_pad_push_event (wildmidi->srcpad,
      gst_wildmidi_get_new_segment_event (wildmidi, GST_FORMAT_TIME));

  GST_DEBUG_OBJECT (wildmidi, "Parsing song done");

  return GST_FLOW_OK;

  /* ERRORS */
open_failed:
  {
    GST_OBJECT_UNLOCK (wildmidi);
    GST_ELEMENT_ERROR (wildmidi, STREAM, DECODE, (NULL),
        ("Unable to parse midi data"));
    return GST_FLOW_ERROR;
  }
}
static GstFlowReturn
gst_rtp_mp2t_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPMP2TPay *rtpmp2tpay;
  guint size, avail, packet_len;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;

  rtpmp2tpay = GST_RTP_MP2T_PAY (basepayload);

  size = gst_buffer_get_size (buffer);
  timestamp = GST_BUFFER_PTS (buffer);
  duration = GST_BUFFER_DURATION (buffer);

again:
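  /* we jump back here with size set to 0 when the incoming buffer carried
   * two or more TS packets, so the adapter is flushed without extra delay */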
  ret = GST_FLOW_OK;
  avail = gst_adapter_available (rtpmp2tpay->adapter);

  /* Initialize new RTP payload */
  if (avail == 0) {
    rtpmp2tpay->first_ts = timestamp;
    rtpmp2tpay->duration = duration;
  }

  /* get packet length of previous data and this new data */
  packet_len = gst_rtp_buffer_calc_packet_len (avail + size, 0, 0);

  /* if this buffer is going to overflow the packet, flush what we have;
   * we also flush when upstream hands us several TS packets at once, to
   * keep latency low */
  if (!size || gst_rtp_base_payload_is_filled (basepayload,
          packet_len, rtpmp2tpay->duration + duration)) {
    ret = gst_rtp_mp2t_pay_flush (rtpmp2tpay);
    rtpmp2tpay->first_ts = timestamp;
    rtpmp2tpay->duration = duration;

    /* keep filling the payload */
  } else {
    if (GST_CLOCK_TIME_IS_VALID (duration))
      rtpmp2tpay->duration += duration;
  }

  /* copy buffer to adapter */
  if (buffer) {
    gst_adapter_push (rtpmp2tpay->adapter, buffer);
    buffer = NULL;
  }

  if (size >= (188 * 2)) {
    size = 0;
    goto again;
  }

  return ret;
}
static GstFlowReturn
gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
{
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint avail;

  guint8 *payload;

  avail = gst_adapter_available (rtpmpvpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint packet_len;
    guint payload_len;

    packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
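    /* cap the packet to one MTU and work back to how many adapter bytes fit;
     * the 4 reserves room for the MPEG video-specific header */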

    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmpvpay));

    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 4, 0);

    outbuf = gst_rtp_buffer_new_allocate (payload_len, 4, 0);

    payload = gst_rtp_buffer_get_payload (outbuf);
    /* MPEG Video-specific header (RFC 2250):
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *                                  AN              FBV     FFV
     *
     * all fields are zeroed here */
    memset (payload, 0x0, 4);

    gst_adapter_copy (rtpmpvpay->adapter, payload + 4, 0, payload_len);
    gst_adapter_flush (rtpmpvpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmpvpay->first_ts;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmpvpay), outbuf);
  }

  return ret;
}
static GstFlowReturn
gst_real_audio_demux_parse_data (GstRealAudioDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint avail, unit_size;

  avail = gst_adapter_available (demux->adapter);

  if (demux->packet_size > 0)
    unit_size = demux->packet_size;
  else
    unit_size = avail & 0xfffffff0;     /* round down to a multiple of 16 */

  GST_LOG_OBJECT (demux, "available = %u, unit_size = %u", avail, unit_size);

  while (ret == GST_FLOW_OK && unit_size > 0 && avail >= unit_size) {
    GstClockTime ts;
    const guint8 *data;
    GstBuffer *buf = NULL;

    buf = gst_buffer_new_and_alloc (unit_size);
    gst_buffer_set_caps (buf, GST_PAD_CAPS (demux->srcpad));

    data = gst_adapter_peek (demux->adapter, unit_size);
    memcpy (GST_BUFFER_DATA (buf), data, unit_size);
    gst_adapter_flush (demux->adapter, unit_size);
    avail -= unit_size;

    if (demux->need_newsegment) {
      gst_pad_push_event (demux->srcpad,
          gst_event_new_new_segment_full (FALSE, demux->segment.rate,
              demux->segment.applied_rate, GST_FORMAT_TIME,
              demux->segment.start, demux->segment.stop, demux->segment.time));
      demux->need_newsegment = FALSE;
    }

    if (demux->pending_tags) {
      gst_element_found_tags_for_pad (GST_ELEMENT (demux), demux->srcpad,
          demux->pending_tags);
      demux->pending_tags = NULL;
    }

    if (demux->fourcc == GST_RM_AUD_DNET) {
      buf = gst_rm_utils_descramble_dnet_buffer (buf);
    }

    ts = gst_real_demux_get_timestamp_from_offset (demux, demux->offset);
    GST_BUFFER_TIMESTAMP (buf) = ts;

    gst_segment_set_last_stop (&demux->segment, GST_FORMAT_TIME, ts);

    ret = gst_pad_push (demux->srcpad, buf);
  }

  return ret;
}
void
gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder * base_video_decoder,
    int n_bytes)
{
  GstBuffer *buf;

  GST_DEBUG ("add to frame");

  if (n_bytes == 0)
    return;

  if (gst_adapter_available (base_video_decoder->output_adapter) == 0) {
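    /* first data of a new frame: remember where it starts in the input */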
    base_video_decoder->frame_offset = base_video_decoder->input_offset -
        gst_adapter_available (base_video_decoder->input_adapter);
  }
  buf = gst_adapter_take_buffer (base_video_decoder->input_adapter, n_bytes);

  gst_adapter_push (base_video_decoder->output_adapter, buf);
}
static gboolean
gst_flac_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
  const GValue *headers;
  GstFlacDec *flacdec;
  GstStructure *s;
  guint i, num;

  flacdec = GST_FLAC_DEC (dec);

  GST_LOG_OBJECT (dec, "sink caps: %" GST_PTR_FORMAT, caps);

  s = gst_caps_get_structure (caps, 0);
  headers = gst_structure_get_value (s, "streamheader");
  if (headers == NULL || !GST_VALUE_HOLDS_ARRAY (headers)) {
    GST_WARNING_OBJECT (dec, "no 'streamheader' field in input caps, try "
        "adding a flacparse element upstream");
    return FALSE;
  }

  if (gst_adapter_available (flacdec->adapter) > 0) {
    GST_WARNING_OBJECT (dec, "unexpected data left in adapter");
    gst_adapter_clear (flacdec->adapter);
  }

  num = gst_value_array_get_size (headers);
  for (i = 0; i < num; ++i) {
    const GValue *header_val;
    GstBuffer *header_buf;

    header_val = gst_value_array_get_value (headers, i);
    if (header_val == NULL || !GST_VALUE_HOLDS_BUFFER (header_val))
      return FALSE;

    header_buf = g_value_dup_boxed (header_val);
    GST_INFO_OBJECT (dec, "pushing header buffer of %" G_GSIZE_FORMAT " bytes "
        "into adapter", gst_buffer_get_size (header_buf));
    gst_adapter_push (flacdec->adapter, header_buf);
  }

  GST_DEBUG_OBJECT (dec, "Processing headers and metadata");
  if (!FLAC__stream_decoder_process_until_end_of_metadata (flacdec->decoder)) {
    GST_WARNING_OBJECT (dec, "process_until_end_of_metadata failed");
    if (FLAC__stream_decoder_get_state (flacdec->decoder) ==
        FLAC__STREAM_DECODER_ABORTED) {
      GST_WARNING_OBJECT (flacdec, "Read callback caused internal abort");
      /* allow recovery */
      gst_adapter_clear (flacdec->adapter);
      FLAC__stream_decoder_flush (flacdec->decoder);
      gst_flac_dec_handle_decoder_error (flacdec, TRUE);
    }
  }
  GST_INFO_OBJECT (dec, "headers and metadata are now processed");
  return TRUE;
}
static GstFlowReturn
gst_flac_dec_handle_frame (GstAudioDecoder * audio_dec, GstBuffer * buf)
{
  GstFlacDec *dec;

  dec = GST_FLAC_DEC (audio_dec);

  /* drain remaining data? */
  if (G_UNLIKELY (buf == NULL)) {
    gst_flac_dec_flush (audio_dec, FALSE);
    return GST_FLOW_OK;
  }

  GST_LOG_OBJECT (dec, "frame: ts %" GST_TIME_FORMAT ", flags 0x%04x, "
      "%" G_GSIZE_FORMAT " bytes", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_BUFFER_FLAGS (buf), gst_buffer_get_size (buf));

  /* drop any in-stream headers, we've processed those in set_format already */
  if (G_UNLIKELY (!dec->got_headers)) {
    gboolean got_audio_frame;
    GstMapInfo map;

    /* check if this is a flac audio frame (rather than a header or junk) */
    gst_buffer_map (buf, &map, GST_MAP_READ);
    got_audio_frame =
        gst_flac_dec_scan_got_frame (dec, map.data, map.size, NULL);
    gst_buffer_unmap (buf, &map);

    if (!got_audio_frame) {
      GST_INFO_OBJECT (dec, "dropping in-stream header, %" G_GSIZE_FORMAT " "
          "bytes", gst_buffer_get_size (buf));
      gst_audio_decoder_finish_frame (audio_dec, NULL, 1);
      return GST_FLOW_OK;
    }

    GST_INFO_OBJECT (dec, "first audio frame, got all in-stream headers now");
    dec->got_headers = TRUE;
  }

  gst_adapter_push (dec->adapter, gst_buffer_ref (buf));
  buf = NULL;

  dec->last_flow = GST_FLOW_OK;

  /* framed - there should always be enough data to decode something */
  GST_LOG_OBJECT (dec, "%" G_GSIZE_FORMAT " bytes available",
      gst_adapter_available (dec->adapter));

  if (!FLAC__stream_decoder_process_single (dec->decoder)) {
    GST_INFO_OBJECT (dec, "process_single failed");
  }

  return dec->last_flow;
}
static void
create_fingerprint (GstOFA * ofa)
{
  GstBuffer *buf;
  GstAudioFilter *ofa_filter = GST_AUDIO_FILTER (ofa);
  gint rate = ofa_filter->format.rate;
  gint channels = ofa_filter->format.channels;
  gint endianness =
      ofa_filter->format.bigend ? OFA_BIG_ENDIAN : OFA_LITTLE_ENDIAN;
  GstTagList *tags;
  guint available;

  available = gst_adapter_available (ofa->adapter);

  if (available == 0) {
    GST_WARNING_OBJECT (ofa, "No data to take fingerprint from");
    ofa->record = FALSE;
    return;
  }

  GST_DEBUG_OBJECT (ofa, "Generating fingerprint for %u samples",
      available / 2);

  buf = gst_adapter_take_buffer (ofa->adapter, available);

  ofa->fingerprint = g_strdup (ofa_create_print (GST_BUFFER_DATA (buf),
          endianness, GST_BUFFER_SIZE (buf) / 2, rate,
          (channels == 2) ? 1 : 0));

  if (ofa->fingerprint) {
    GST_INFO_OBJECT (ofa, "Generated fingerprint: %s", ofa->fingerprint);
  } else {
    GST_WARNING_OBJECT (ofa, "Failed to generate fingerprint");
  }

  gst_buffer_unref (buf);

  if (ofa->fingerprint) {
    tags = gst_tag_list_new ();
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_OFA_FINGERPRINT, ofa->fingerprint, NULL);
    gst_element_found_tags (GST_ELEMENT (ofa), tags);

    g_object_notify (G_OBJECT (ofa), "fingerprint");
  }

  ofa->record = FALSE;
}
static GstFlowReturn
gst_chop_my_data_process (GstChopMyData * chopmydata, gboolean flush)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;

  if (chopmydata->next_size == 0) {
    get_next_size (chopmydata);
  }

  while (gst_adapter_available (chopmydata->adapter) >= chopmydata->next_size) {
    buffer =
        gst_adapter_take_buffer (chopmydata->adapter, chopmydata->next_size);

    GST_BUFFER_PTS (buffer) = gst_adapter_prev_pts (chopmydata->adapter, NULL);
    GST_BUFFER_DTS (buffer) = gst_adapter_prev_dts (chopmydata->adapter, NULL);

    chopmydata->next_size = 0;

    ret = gst_pad_push (chopmydata->srcpad, buffer);
    if (ret != GST_FLOW_OK) {
      return ret;
    }

    get_next_size (chopmydata);
  }

  if (flush) {
    guint min_size = chopmydata->min_size;

    while (gst_adapter_available (chopmydata->adapter) >= min_size) {
      buffer = gst_adapter_take_buffer (chopmydata->adapter, min_size);
      ret = gst_pad_push (chopmydata->srcpad, buffer);
      if (ret != GST_FLOW_OK)
        break;
    }
    gst_adapter_clear (chopmydata->adapter);
  }

  return ret;
}
static GstFlowReturn
gst_webrtc_echo_probe_transform_ip (GstBaseTransform * btrans,
    GstBuffer * buffer)
{
  GstWebrtcEchoProbe *self = GST_WEBRTC_ECHO_PROBE (btrans);
  GstBuffer *newbuf = NULL;

  GST_WEBRTC_ECHO_PROBE_LOCK (self);
  newbuf = gst_buffer_copy (buffer);
  /* Moves the buffer timestamp to be in Running time */
  GST_BUFFER_PTS (newbuf) = gst_segment_to_running_time (&btrans->segment,
      GST_FORMAT_TIME, GST_BUFFER_PTS (buffer));
  gst_adapter_push (self->adapter, newbuf);

  if (gst_adapter_available (self->adapter) > MAX_ADAPTER_SIZE)
    gst_adapter_flush (self->adapter,
        gst_adapter_available (self->adapter) - MAX_ADAPTER_SIZE);
  GST_WEBRTC_ECHO_PROBE_UNLOCK (self);

  return GST_FLOW_OK;
}
static gboolean
gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry)
{
  GstBuffer *buf;
  guint avail;
  const gchar *next_fragment_uri;
  GstClockTime duration;
  GstClockTime timestamp;
  gboolean discont;

  if (!gst_m3u8_client_get_next_fragment (demux->client, &discont,
          &next_fragment_uri, &duration, &timestamp)) {
    GST_INFO_OBJECT (demux, "This playlist doesn't contain more fragments");
    demux->end_of_playlist = TRUE;
    gst_task_start (demux->task);
    return FALSE;
  }

  GST_INFO_OBJECT (demux, "Fetching next fragment %s", next_fragment_uri);

  if (!gst_hls_demux_fetch_location (demux, next_fragment_uri))
    return FALSE;

  avail = gst_adapter_available (demux->download);
  buf = gst_adapter_take_buffer (demux->download, avail);
  GST_BUFFER_DURATION (buf) = duration;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;

  /* We actually need to do this every time we switch bitrate */
  if (G_UNLIKELY (demux->do_typefind)) {
    GstCaps *caps = gst_type_find_helper_for_buffer (NULL, buf, NULL);

    if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) {
      gst_caps_replace (&demux->input_caps, caps);
      /* gst_pad_set_caps (demux->srcpad, demux->input_caps); */
      GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT,
          demux->input_caps);
      demux->do_typefind = FALSE;
    }
    /* gst_caps_replace() takes its own ref, so drop ours in both cases */
    gst_caps_unref (caps);
  }
  gst_buffer_set_caps (buf, demux->input_caps);

  if (discont) {
    GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  g_queue_push_tail (demux->queue, buf);
  gst_task_start (demux->task);
  gst_adapter_clear (demux->download);
  return TRUE;
}
static GstFlowReturn
gst_dvd_sub_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstDvdSubParse *parse = GST_DVD_SUB_PARSE (parent);
  GstAdapter *adapter;
  GstBuffer *outbuf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;

  adapter = parse->adapter;

  GST_LOG_OBJECT (parse, "%" G_GSIZE_FORMAT " bytes, ts: %" GST_TIME_FORMAT,
      gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  gst_adapter_push (adapter, buf);

  if (!parse->needed) {
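    /* the first two bytes of a DVD subpicture packet give its total size */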
    guint8 data[2];

    gst_adapter_copy (adapter, data, 0, 2);
    parse->needed = GST_READ_UINT16_BE (data);
  }

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    if (GST_CLOCK_TIME_IS_VALID (parse->stamp))
      /* normally, we expect only the first fragment to carry a timestamp */
      GST_WARNING_OBJECT (parse, "Received more timestamps than expected.");
    else
      parse->stamp = GST_BUFFER_TIMESTAMP (buf);
  }

  if (parse->needed) {
    guint av;

    av = gst_adapter_available (adapter);
    if (av >= parse->needed) {
      if (av > parse->needed) {
        /* normally, we expect several fragments, boundary aligned */
        GST_WARNING_OBJECT (parse, "Unexpected: needed %u, "
            "but more (%u) is available.", parse->needed, av);
      }
      outbuf = gst_adapter_take_buffer (adapter, parse->needed);
      /* decorate buffer */
      GST_BUFFER_TIMESTAMP (outbuf) = parse->stamp;
      /* reset state */
      parse->stamp = GST_CLOCK_TIME_NONE;
      parse->needed = 0;
      /* and send along */
      ret = gst_pad_push (parse->srcpad, outbuf);
    }
  }

  return ret;
}
static GstFlowReturn
gst_flite_test_src_create (GstBaseSrc * basesrc, guint64 offset,
    guint length, GstBuffer ** buffer)
{
  GstFliteTestSrc *src;
  int n_bytes;

  src = GST_FLITE_TEST_SRC (basesrc);

  n_bytes = src->info.channels * sizeof (gint16) * src->samples_per_buffer;

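  /* synthesize speech, one channel per utterance, until enough samples
   * are queued for a full output buffer */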
  while (gst_adapter_available (src->adapter) < n_bytes) {
    GstBuffer *buf;
    char *text;
    int i;
    GstMapInfo map;
    gint16 *data;
    cst_wave *wave;
    gsize size;

    text = get_channel_name (src, src->channel);

    wave = flite_text_to_wave (text, src->voice);
    g_free (text);
    cst_wave_resample (wave, src->info.rate);

    GST_DEBUG ("type %s, sample_rate %d, num_samples %d, num_channels %d",
        wave->type, wave->sample_rate, wave->num_samples, wave->num_channels);

    size = src->info.channels * sizeof (gint16) * wave->num_samples;
    buf = gst_buffer_new_and_alloc (size);

    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    data = (gint16 *) map.data;
    memset (data, 0, size);
    for (i = 0; i < wave->num_samples; i++) {
      data[i * src->info.channels + src->channel] = wave->samples[i];
    }
    gst_buffer_unmap (buf, &map);
    delete_wave (wave);

    src->channel++;
    if (src->channel == src->info.channels) {
      src->channel = 0;
    }

    gst_adapter_push (src->adapter, buf);
  }

  *buffer = gst_adapter_take_buffer (src->adapter, n_bytes);

  return GST_FLOW_OK;
}
void
gst_base_video_parse_lost_sync (GstBaseVideoParse * base_video_parse)
{
  g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse));

  GST_DEBUG ("lost_sync");

  if (gst_adapter_available (base_video_parse->input_adapter) >= 1) {
    gst_adapter_flush (base_video_parse->input_adapter, 1);
  }

  base_video_parse->have_sync = FALSE;
}
void
gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
{
  g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder));

  GST_DEBUG ("lost_sync");

  if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) {
    gst_adapter_flush (base_video_decoder->input_adapter, 1);
  }

  base_video_decoder->have_sync = FALSE;
}
static GstFlowReturn sbc_enc_chain(GstPad *pad, GstBuffer *buffer)
{
	GstSbcEnc *enc = GST_SBC_ENC(gst_pad_get_parent(pad));
	GstAdapter *adapter = enc->adapter;
	GstFlowReturn res = GST_FLOW_OK;

	gst_adapter_push(adapter, buffer);

	while (gst_adapter_available(adapter) >= enc->codesize &&
							res == GST_FLOW_OK) {
		GstBuffer *output;
		GstCaps *caps;
		const guint8 *data;
		gint consumed;

		caps = GST_PAD_CAPS(enc->srcpad);
		res = gst_pad_alloc_buffer_and_set_caps(enc->srcpad,
						GST_BUFFER_OFFSET_NONE,
						enc->frame_length, caps,
						&output);
		if (res != GST_FLOW_OK)
			goto done;

		data = gst_adapter_peek(adapter, enc->codesize);

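		/* encode one SBC frame; consumed is the number of input bytes used */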
		consumed = sbc_encode(&enc->sbc, (gpointer) data,
					enc->codesize,
					GST_BUFFER_DATA(output),
					GST_BUFFER_SIZE(output), NULL);
		if (consumed <= 0) {
			GST_DEBUG_OBJECT(enc, "comsumed < 0, codesize: %d",
					enc->codesize);
			break;
		}
		gst_adapter_flush(adapter, consumed);

		GST_BUFFER_TIMESTAMP(output) = GST_BUFFER_TIMESTAMP(buffer);
		/* we have only 1 frame */
		GST_BUFFER_DURATION(output) = enc->frame_duration;

		res = gst_pad_push(enc->srcpad, output);

		if (res != GST_FLOW_OK)
			goto done;
	}

done:
	gst_object_unref(enc);

	return res;
}
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail, mtu;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp2tpay);

  while (avail > 0 && (ret == GST_FLOW_OK)) {
    guint towrite;
    guint payload_len;
    guint packet_len;
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, mtu);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
    payload_len -= payload_len % 188;

    /* need whole packets */
    if (!payload_len)
      break;

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);

    /* get payload */
    paybuf = gst_adapter_take_buffer_fast (rtpmp2tpay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmp2tpay), outbuf, paybuf, 0);
    outbuf = gst_buffer_append (outbuf, paybuf);
    avail -= payload_len;

    GST_BUFFER_PTS (outbuf) = rtpmp2tpay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

    GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %u",
        (guint) gst_buffer_get_size (outbuf));

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp2tpay), outbuf);
  }

  return ret;
}
static GstFlowReturn
gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;

  /* the data available in the adapter is either smaller
   * than the MTU or bigger. In the case it is smaller, the complete
   * adapter contents can be put in one packet. In the case the
   * adapter has more than one MTU, we need to split the MP4V data
   * over multiple packets. */
  avail = gst_adapter_available (rtpmp4vpay->adapter);

  ret = GST_FLOW_OK;

  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_BASE_RTP_PAYLOAD_MTU (rtpmp4vpay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);

    gst_adapter_copy (rtpmp4vpay->adapter, payload, 0, payload_len);
    gst_adapter_flush (rtpmp4vpay->adapter, payload_len);

    avail -= payload_len;

    gst_rtp_buffer_set_marker (outbuf, avail == 0);

    GST_BUFFER_TIMESTAMP (outbuf) = rtpmp4vpay->first_timestamp;

    ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtpmp4vpay), outbuf);
  }

  return ret;
}
long
AudioStream::read_bytes (void *data, long length)
{ 
  gssize len = gst_adapter_available (adapter);

  if (len < length)
    length = len;

  GST_LOG ("Reading %d bytes from adapter", (int) length);
  gst_adapter_copy (adapter, data, 0, length);
  gst_adapter_flush (adapter, length);

  return length;
}
static GstFlowReturn
gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpMPAPay *rtpmpapay;
  GstFlowReturn ret;
  guint size, avail;
  guint packet_len;
  GstClockTime duration, timestamp;

  rtpmpapay = GST_RTP_MPA_PAY (basepayload);

  size = gst_buffer_get_size (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  timestamp = GST_BUFFER_PTS (buffer);

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG_OBJECT (rtpmpapay, "DISCONT");
    gst_rtp_mpa_pay_reset (rtpmpapay);
  }

  avail = gst_adapter_available (rtpmpapay->adapter);

  /* get packet length of previous data and this new data,
   * payload length includes a 4 byte header */
  packet_len = gst_rtp_buffer_calc_packet_len (4 + avail + size, 0, 0);

  /* if this buffer is going to overflow the packet, flush what we
   * have. */
  if (gst_rtp_base_payload_is_filled (basepayload,
          packet_len, rtpmpapay->duration + duration)) {
    ret = gst_rtp_mpa_pay_flush (rtpmpapay);
    avail = 0;
  } else {
    ret = GST_FLOW_OK;
  }

  if (avail == 0) {
    GST_DEBUG_OBJECT (rtpmpapay,
        "first packet, save timestamp %" GST_TIME_FORMAT,
        GST_TIME_ARGS (timestamp));
    rtpmpapay->first_ts = timestamp;
    rtpmpapay->duration = 0;
  }

  gst_adapter_push (rtpmpapay->adapter, buffer);
  rtpmpapay->duration += duration;

  return ret;
}
static int
gst_ffmpeg_pipe_read (URLContext * h, unsigned char *buf, int size)
{
  GstFFMpegPipe *ffpipe;
  const guint8 *data;
  guint available;

  ffpipe = (GstFFMpegPipe *) h->priv_data;

  GST_LOG ("requested size %d", size);

  GST_FFMPEG_PIPE_MUTEX_LOCK (ffpipe);

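  /* wait until the chain function has queued enough data, or EOS */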
  while ((available = gst_adapter_available (ffpipe->adapter)) < size
      && !ffpipe->eos) {
    GST_DEBUG ("Available:%d, requested:%d", available, size);
    ffpipe->needed = size;
    GST_FFMPEG_PIPE_SIGNAL (ffpipe);
    GST_FFMPEG_PIPE_WAIT (ffpipe);
  }

  size = MIN (available, size);
  if (size) {
    GST_LOG ("Getting %d bytes", size);
    data = gst_adapter_peek (ffpipe->adapter, size);
    memcpy (buf, data, size);
    gst_adapter_flush (ffpipe->adapter, size);
    GST_LOG ("%d bytes left in adapter",
        gst_adapter_available (ffpipe->adapter));
    ffpipe->needed = 0;
  }
  GST_FFMPEG_PIPE_MUTEX_UNLOCK (ffpipe);

  return size;
}
static void
gst_icydemux_parse_and_send_tags (GstICYDemux * icydemux)
{
  GstTagList *tags;
  const guint8 *data;
  int length, i;
  gchar *buffer;
  gchar **strings;

  length = gst_adapter_available (icydemux->meta_adapter);

  data = gst_adapter_peek (icydemux->meta_adapter, length);

  /* Now, copy this to a buffer where we can NULL-terminate it to make things
   * a bit easier, then do that parsing. */
  buffer = g_strndup ((const gchar *) data, length);

  tags = gst_tag_list_new ();
  strings = g_strsplit (buffer, "';", 0);
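  /* ICY metadata is a sequence of Key='value'; pairs, so after splitting
   * on "';" every token starts with Key=' */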

  for (i = 0; strings[i]; i++) {
    if (!g_ascii_strncasecmp (strings[i], "StreamTitle=", 12)) {
      char *title = gst_icydemux_unicodify (strings[i] + 13);

      if (title && *title) {
        gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
            title, NULL);
        g_free (title);
      }
    } else if (!g_ascii_strncasecmp (strings[i], "StreamUrl=", 10)) {
      char *url = gst_icydemux_unicodify (strings[i] + 11);

      if (url && *url) {
        gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_HOMEPAGE,
            url, NULL);
        g_free (url);
      }
    }
  }

  g_strfreev (strings);
  g_free (buffer);
  gst_adapter_clear (icydemux->meta_adapter);

  if (!gst_tag_list_is_empty (tags))
    gst_icydemux_tag_found (icydemux, tags);
  else
    gst_tag_list_free (tags);
}