/**
 * gst_base_rtp_audio_payload_flush:
 * @baseaudiopayload: a #GstBaseRTPAudioPayload
 * @payload_len: length of payload
 * @timestamp: a #GstClockTime
 *
 * Create an RTP buffer and store @payload_len bytes of the adapter as the
 * payload. Set the timestamp on the new buffer to @timestamp before pushing
 * the buffer downstream.
 *
 * If @payload_len is -1, all pending bytes will be flushed. If @timestamp is
 * -1, the timestamp will be calculated automatically.
 *
 * Returns: a #GstFlowReturn
 *
 * Since: 0.10.25
 */
GstFlowReturn
gst_base_rtp_audio_payload_flush (GstBaseRTPAudioPayload * baseaudiopayload,
    guint payload_len, GstClockTime timestamp)
{
  GstBaseRTPPayload *basepayload;
  GstBaseRTPAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  guint8 *payload;
  GstFlowReturn ret;
  GstAdapter *adapter;
  guint64 distance;

  priv = baseaudiopayload->priv;
  adapter = priv->adapter;

  basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);

  if (payload_len == -1)
    payload_len = gst_adapter_available (adapter);

  /* nothing to do, just return */
  if (payload_len == 0)
    return GST_FLOW_OK;

  if (timestamp == -1) {
    /* calculate the timestamp */
    timestamp = gst_adapter_prev_timestamp (adapter, &distance);

    GST_LOG_OBJECT (baseaudiopayload,
        "last timestamp %" GST_TIME_FORMAT ", distance %" G_GUINT64_FORMAT,
        GST_TIME_ARGS (timestamp), distance);

    if (GST_CLOCK_TIME_IS_VALID (timestamp) && distance > 0) {
      /* convert the number of bytes since the last timestamp to time and add to
       * the last seen timestamp */
      timestamp += priv->bytes_to_time (baseaudiopayload, distance);
    }
  }

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list && gst_adapter_available_fast (adapter) >= payload_len) {
    GstBuffer *buffer;
    /* we can quickly take a buffer out of the adapter without having to copy
     * anything. */
    buffer = gst_adapter_take_buffer (adapter, payload_len);

    ret = gst_base_rtp_audio_payload_push_buffer (baseaudiopayload, buffer);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);

    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);
    gst_adapter_copy (adapter, payload, 0, payload_len);
    gst_adapter_flush (adapter, payload_len);

    /* set metadata */
    gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
        timestamp);

    ret = gst_basertppayload_push (basepayload, outbuf);
  }

  return ret;
}
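
/* Usage sketch (not part of the original source): a payloader subclass that
 * has pushed samples into the base class adapter could flush all pending
 * bytes with an automatically derived timestamp like this. */
static GstFlowReturn
example_flush_all_pending (GstBaseRTPAudioPayload * payload)
{
  /* -1 length flushes everything in the adapter; -1 timestamp lets
   * gst_base_rtp_audio_payload_flush() compute one from the adapter */
  return gst_base_rtp_audio_payload_flush (payload, -1, -1);
}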
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (GST_PAD_PARENT (pad));
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *mask = NULL, *outbuf = NULL;
  GstClockTime timestamp;
  gboolean new_outbuf = FALSE;

  if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
    return GST_FLOW_NOT_NEGOTIATED;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  timestamp =
      gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp))
    gst_object_sync_values (G_OBJECT (self), timestamp);

  GST_DEBUG_OBJECT (self,
      "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %lf",
      GST_TIME_ARGS (timestamp), self->mask_position);

  g_mutex_lock (self->mask_mutex);
  if (!self->mask)
    g_cond_wait (self->mask_cond, self->mask_mutex);

  if (self->mask == NULL) {
    g_mutex_unlock (self->mask_mutex);
    gst_buffer_unref (buffer);
    return GST_FLOW_UNEXPECTED;
  } else {
    mask = gst_buffer_ref (self->mask);
  }
  g_mutex_unlock (self->mask_mutex);

  if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer))) {
    gst_buffer_unref (buffer);
    gst_buffer_unref (mask);
    return GST_FLOW_OK;
  }

  /* Try to blend inplace, if it's not possible
   * get a new buffer from downstream.
   */
  if (!gst_buffer_is_writable (buffer)) {
    ret =
        gst_pad_alloc_buffer_and_set_caps (self->srcpad, GST_BUFFER_OFFSET_NONE,
        GST_BUFFER_SIZE (buffer), GST_PAD_CAPS (self->srcpad), &outbuf);
    if (G_UNLIKELY (ret != GST_FLOW_OK)) {
      gst_buffer_unref (buffer);
      gst_buffer_unref (mask);
      return ret;
    }
    gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_ALL);
    new_outbuf = TRUE;
  } else {
    outbuf = buffer;
  }

  if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
    ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
    ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA && self->mask_bpp == 16)
    ret = gst_shape_wipe_blend_bgra_16 (self, buffer, mask, outbuf);
  else if (self->fmt == GST_VIDEO_FORMAT_BGRA)
    ret = gst_shape_wipe_blend_bgra_8 (self, buffer, mask, outbuf);
  else
    g_assert_not_reached ();

  gst_buffer_unref (mask);
  if (new_outbuf)
    gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    gst_buffer_unref (outbuf);
    return ret;
  }

  ret = gst_pad_push (self->srcpad, outbuf);
  return ret;
}
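
/* A sketch of the in-place-or-allocate pattern used above (0.10 API, names
 * are illustrative): modify the buffer directly when it is writable,
 * otherwise request a fresh output buffer from downstream. */
static GstBuffer *
example_get_writable_output (GstPad * srcpad, GstBuffer * inbuf)
{
  GstBuffer *outbuf = NULL;

  if (gst_buffer_is_writable (inbuf))
    return inbuf;

  if (gst_pad_alloc_buffer_and_set_caps (srcpad, GST_BUFFER_OFFSET_NONE,
          GST_BUFFER_SIZE (inbuf), GST_PAD_CAPS (srcpad),
          &outbuf) != GST_FLOW_OK)
    return NULL;

  gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_ALL);
  return outbuf;
}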
static GstFlowReturn
gst_videoframe_audiolevel_asink_chain (GstPad * pad, GstObject * parent,
    GstBuffer * inbuf)
{
  GstClockTime timestamp, cur_time;
  GstVideoFrameAudioLevel *self = GST_VIDEOFRAME_AUDIOLEVEL (parent);
  GstBuffer *buf;
  gsize inbuf_size;
  guint64 start_offset, end_offset;
  GstClockTime running_time;
  gint rate, bpf;
  gboolean discont = FALSE;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);
  running_time =
      gst_segment_to_running_time (&self->asegment, GST_FORMAT_TIME, timestamp);

  rate = GST_AUDIO_INFO_RATE (&self->ainfo);
  bpf = GST_AUDIO_INFO_BPF (&self->ainfo);
  start_offset = gst_util_uint64_scale (timestamp, rate, GST_SECOND);
  inbuf_size = gst_buffer_get_size (inbuf);
  end_offset = start_offset + inbuf_size / bpf;

  g_mutex_lock (&self->mutex);

  if (GST_BUFFER_IS_DISCONT (inbuf)
      || GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_RESYNC)
      || self->first_time == GST_CLOCK_TIME_NONE) {
    discont = TRUE;
  } else {
    guint64 diff, max_sample_diff;

    /* Check discont, based on audiobasesink */
    if (start_offset <= self->next_offset)
      diff = self->next_offset - start_offset;
    else
      diff = start_offset - self->next_offset;

    max_sample_diff =
        gst_util_uint64_scale_int (self->alignment_threshold, rate, GST_SECOND);

    /* Discont! */
    if (G_UNLIKELY (diff >= max_sample_diff)) {
      if (self->discont_wait > 0) {
        if (self->discont_time == GST_CLOCK_TIME_NONE) {
          self->discont_time = timestamp;
        } else if (timestamp - self->discont_time >= self->discont_wait) {
          discont = TRUE;
          self->discont_time = GST_CLOCK_TIME_NONE;
        }
      } else {
        discont = TRUE;
      }
    } else if (G_UNLIKELY (self->discont_time != GST_CLOCK_TIME_NONE)) {
      /* we have had a discont, but are now back on track! */
      self->discont_time = GST_CLOCK_TIME_NONE;
    }
  }

  if (discont) {
    /* Have discont, need resync */
    if (self->next_offset != -1)
      GST_INFO_OBJECT (pad, "Have discont. Expected %"
          G_GUINT64_FORMAT ", got %" G_GUINT64_FORMAT,
          self->next_offset, start_offset);
    self->total_frames = 0;
    self->first_time = running_time;
    self->next_offset = end_offset;
  } else {
    self->next_offset += inbuf_size / bpf;
  }

  gst_adapter_push (self->adapter, gst_buffer_ref (inbuf));

  GST_DEBUG_OBJECT (self, "Queue length %i",
      g_queue_get_length (&self->vtimeq));

  while (TRUE) {
    GstClockTime *vt0, *vt1;
    GstClockTime vtemp;
    GstMessage *msg;
    gsize bytes, available_bytes;

    vtemp = GST_CLOCK_TIME_NONE;

    while (!(g_queue_get_length (&self->vtimeq) >= 2 || self->video_eos_flag
            || self->audio_flush_flag || self->shutdown_flag))
      g_cond_wait (&self->cond, &self->mutex);

    if (self->audio_flush_flag || self->shutdown_flag) {
      g_mutex_unlock (&self->mutex);
      gst_buffer_unref (inbuf);
      return GST_FLOW_FLUSHING;
    } else if (self->video_eos_flag) {
      GST_DEBUG_OBJECT (self, "Video EOS flag alert");
      /* nothing to do here if queue is empty */
      if (g_queue_get_length (&self->vtimeq) == 0)
        break;

      if (g_queue_get_length (&self->vtimeq) < 2) {
        vtemp = self->vsegment.position;
      } else if (self->vsegment.position == GST_CLOCK_TIME_NONE) {
        /* g_queue_get_length is surely >= 2 at this point
         * so the adapter isn't empty */
        buf =
            gst_adapter_take_buffer (self->adapter,
            gst_adapter_available (self->adapter));
        if (buf != NULL) {
          GstMessage *msg;
          msg = update_rms_from_buffer (self, buf);
          g_mutex_unlock (&self->mutex);
          gst_element_post_message (GST_ELEMENT (self), msg);
          gst_buffer_unref (buf);
          g_mutex_lock (&self->mutex);  /* we unlock again later */
        }
        break;
      }
    } else if (g_queue_get_length (&self->vtimeq) < 2) {
      continue;
    }

    vt0 = g_queue_pop_head (&self->vtimeq);
    if (vtemp == GST_CLOCK_TIME_NONE)
      vt1 = g_queue_peek_head (&self->vtimeq);
    else
      vt1 = &vtemp;

    cur_time =
        self->first_time + gst_util_uint64_scale (self->total_frames,
        GST_SECOND, rate);
    GST_DEBUG_OBJECT (self,
        "Processing: current time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (cur_time));
    GST_DEBUG_OBJECT (self, "Total frames is %i with a rate of %d",
        self->total_frames, rate);
    GST_DEBUG_OBJECT (self, "Start time is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (self->first_time));
    GST_DEBUG_OBJECT (self, "Time on top is %" GST_TIME_FORMAT,
        GST_TIME_ARGS (*vt0));

    if (cur_time < *vt0) {
      guint num_frames =
          gst_util_uint64_scale (*vt0 - cur_time, rate, GST_SECOND);
      bytes = num_frames * GST_AUDIO_INFO_BPF (&self->ainfo);
      available_bytes = gst_adapter_available (self->adapter);
      if (available_bytes == 0) {
        g_queue_push_head (&self->vtimeq, vt0);
        break;
      }
      if (bytes == 0) {
        cur_time = *vt0;
      } else {
        GST_DEBUG_OBJECT (self,
            "Flushed %" G_GSIZE_FORMAT " out of %" G_GSIZE_FORMAT " bytes",
            bytes, available_bytes);
        gst_adapter_flush (self->adapter, MIN (bytes, available_bytes));
        self->total_frames += num_frames;
        if (available_bytes <= bytes) {
          g_queue_push_head (&self->vtimeq, vt0);
          break;
        }
        cur_time =
            self->first_time + gst_util_uint64_scale (self->total_frames,
            GST_SECOND, rate);
      }
    }
    if (*vt1 > cur_time) {
      bytes =
          GST_AUDIO_INFO_BPF (&self->ainfo) * gst_util_uint64_scale (*vt1 -
          cur_time, rate, GST_SECOND);
    } else {
      bytes = 0;                /* We just need to discard vt0 */
    }
    available_bytes = gst_adapter_available (self->adapter);
    GST_DEBUG_OBJECT (self,
        "Adapter contains %" G_GSIZE_FORMAT " out of %" G_GSIZE_FORMAT " bytes",
        available_bytes, bytes);

    if (available_bytes < bytes) {
      g_queue_push_head (&self->vtimeq, vt0);
      goto done;
    }

    if (bytes > 0) {
      buf = gst_adapter_take_buffer (self->adapter, bytes);
      g_assert (buf != NULL);
    } else {
      /* Just an empty buffer */
      buf = gst_buffer_new ();
    }
    msg = update_rms_from_buffer (self, buf);
    g_mutex_unlock (&self->mutex);
    gst_element_post_message (GST_ELEMENT (self), msg);
    g_mutex_lock (&self->mutex);

    gst_buffer_unref (buf);
    g_free (vt0);
    if (available_bytes == bytes)
      break;
  }
done:
  g_mutex_unlock (&self->mutex);
  return gst_pad_push (self->asrcpad, inbuf);
}
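
/* Sketch of the sample/time conversions this element relies on:
 * gst_util_uint64_scale (v, num, denom) computes v * num / denom without
 * intermediate 64-bit overflow, which is why it is used above instead of
 * plain arithmetic. */
static guint64
example_time_to_samples (GstClockTime ts, gint rate)
{
  return gst_util_uint64_scale (ts, rate, GST_SECOND);
}

static GstClockTime
example_samples_to_time (guint64 samples, gint rate)
{
  return gst_util_uint64_scale (samples, GST_SECOND, rate);
}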
/* Called when pipeline is pre-rolled */
static void
discoverer_collect (GstDiscoverer * dc)
{
  GST_DEBUG ("Collecting information");

  /* Stop the timeout handler if present */
  if (dc->priv->timeoutid) {
    g_source_remove (dc->priv->timeoutid);
    dc->priv->timeoutid = 0;
  }

  if (dc->priv->streams) {
    /* FIXME : Make this querying optional */
    if (TRUE) {
      GstElement *pipeline = (GstElement *) dc->priv->pipeline;
      GstFormat format = GST_FORMAT_TIME;
      gint64 dur;

      GST_DEBUG ("Attempting to query duration");

      if (gst_element_query_duration (pipeline, &format, &dur)) {
        if (format == GST_FORMAT_TIME) {
          GST_DEBUG ("Got duration %" GST_TIME_FORMAT, GST_TIME_ARGS (dur));
          dc->priv->current_info->duration = (guint64) dur;
        }
      }

      if (dc->priv->seeking_query) {
        if (gst_element_query (pipeline, dc->priv->seeking_query)) {
          gboolean seekable;

          gst_query_parse_seeking (dc->priv->seeking_query, &format,
              &seekable, NULL, NULL);
          if (format == GST_FORMAT_TIME) {
            GST_DEBUG ("Got seekable %d", seekable);
            dc->priv->current_info->seekable = seekable;
          }
        }
      }
    }

    if (dc->priv->current_topology)
      dc->priv->current_info->stream_info = parse_stream_topology (dc,
          dc->priv->current_topology, NULL);

    /*
     * Images need some special handling. They do not have a duration, have
     * caps named image/<foo> (the exception being MJPEG video, which is also
     * typed image/jpeg), and should consist of precisely one stream (actually
     * initially there are 2, the image and raw stream, but we squash these
     * while parsing the stream topology). At some point, if we find that these
     * conditions are not sufficient, we can count the number of decoders and
     * parsers in the chain, and if there's more than one decoder, or any
     * parser at all, we should not mark this as an image.
     */
    if (dc->priv->current_info->duration == 0 &&
        dc->priv->current_info->stream_info != NULL &&
        dc->priv->current_info->stream_info->next == NULL) {
      GstStructure *st =
          gst_caps_get_structure (dc->priv->current_info->stream_info->caps, 0);

      if (g_str_has_prefix (gst_structure_get_name (st), "image/"))
        ((GstDiscovererVideoInfo *) dc->priv->current_info->
            stream_info)->is_image = TRUE;
    }
  }

  if (dc->priv->async) {
    GST_DEBUG ("Emitting 'discoverered'");
    g_signal_emit (dc, gst_discoverer_signals[SIGNAL_DISCOVERED], 0,
        dc->priv->current_info, dc->priv->current_error);
    /* Clients get a copy of current_info since it is a boxed type */
    gst_discoverer_info_unref (dc->priv->current_info);
  }
}
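
/* Hypothetical client-side counterpart (names illustrative): receiving the
 * info emitted above through the async 'discovered' signal. */
static void
example_discovered_cb (GstDiscoverer * dc, GstDiscovererInfo * info,
    GError * err, gpointer user_data)
{
  g_print ("duration: %" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (gst_discoverer_info_get_duration (info)));
}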
static gboolean
gst_rtp_jitter_buffer_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean ret = TRUE;
  GstRtpJitterBuffer *jitterbuffer;
  GstRtpJitterBufferPrivate *priv;

  jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
  priv = jitterbuffer->priv;

  GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;
      gboolean update;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      /* we need time for now */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      GST_DEBUG_OBJECT (jitterbuffer,
          "newsegment: update %d, rate %g, arate %g, start %" GST_TIME_FORMAT
          ", stop %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT,
          update, rate, arate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
          GST_TIME_ARGS (time));

      /* now configure the values, we need these to time the release of the
       * buffers on the srcpad. */
      gst_segment_set_newsegment_full (&priv->segment, update,
          rate, arate, format, start, stop, time);

      /* FIXME, push SEGMENT in the queue. Sorting order might be difficult. */
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
    }
    case GST_EVENT_FLUSH_START:
      gst_rtp_jitter_buffer_flush_start (jitterbuffer);
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      ret = gst_pad_push_event (priv->srcpad, event);
      ret = gst_rtp_jitter_buffer_src_activate_push (priv->srcpad, TRUE);
      break;
    case GST_EVENT_EOS:
    {
      /* push EOS in queue. We always push it at the head */
      JBUF_LOCK (priv);
      /* check for flushing, we need to discard the event and return FALSE when
       * we are flushing */
      ret = priv->srcresult == GST_FLOW_OK;
      if (ret && !priv->eos) {
        GST_DEBUG_OBJECT (jitterbuffer, "queuing EOS");
        priv->eos = TRUE;
        JBUF_SIGNAL (priv);
      } else if (priv->eos) {
        GST_DEBUG_OBJECT (jitterbuffer, "dropping EOS, we are already EOS");
      } else {
        GST_DEBUG_OBJECT (jitterbuffer, "dropping EOS, reason %s",
            gst_flow_get_name (priv->srcresult));
      }
      JBUF_UNLOCK (priv);
      gst_event_unref (event);
      break;
    }
    default:
      ret = gst_pad_push_event (priv->srcpad, event);
      break;
  }

done:
  gst_object_unref (jitterbuffer);

  return ret;

  /* ERRORS */
newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (jitterbuffer, "received non-TIME newsegment");
    ret = FALSE;
    goto done;
  }
}
GstIsoffParserResult
gst_isoff_sidx_parser_add_buffer (GstSidxParser * parser, GstBuffer * buffer,
    guint * consumed)
{
  GstIsoffParserResult res = GST_ISOFF_PARSER_OK;
  GstByteReader reader;
  GstMapInfo info;
  gsize remaining;
  guint32 fourcc;

  if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
    *consumed = 0;
    return GST_ISOFF_PARSER_ERROR;
  }

  gst_byte_reader_init (&reader, info.data, info.size);

  switch (parser->status) {
    case GST_ISOFF_SIDX_PARSER_INIT:
      if (gst_byte_reader_get_remaining (&reader) < GST_ISOFF_FULL_BOX_SIZE) {
        break;
      }

      parser->size = gst_byte_reader_get_uint32_be_unchecked (&reader);
      fourcc = gst_byte_reader_get_uint32_le_unchecked (&reader);
      if (fourcc != GST_ISOFF_FOURCC_SIDX) {
        res = GST_ISOFF_PARSER_UNEXPECTED;
        gst_byte_reader_set_pos (&reader, 0);
        break;
      }
      if (parser->size == 1) {
        if (gst_byte_reader_get_remaining (&reader) < 12) {
          gst_byte_reader_set_pos (&reader, 0);
          break;
        }

        parser->size = gst_byte_reader_get_uint64_be_unchecked (&reader);
      }
      if (parser->size == 0) {
        res = GST_ISOFF_PARSER_ERROR;
        gst_byte_reader_set_pos (&reader, 0);
        break;
      }
      parser->sidx.version = gst_byte_reader_get_uint8_unchecked (&reader);
      parser->sidx.flags = gst_byte_reader_get_uint24_le_unchecked (&reader);

      parser->status = GST_ISOFF_SIDX_PARSER_HEADER;
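      /* fall through */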

    case GST_ISOFF_SIDX_PARSER_HEADER:
      remaining = gst_byte_reader_get_remaining (&reader);
      if (remaining < 12 + (parser->sidx.version == 0 ? 8 : 16)) {
        break;
      }

      parser->sidx.ref_id = gst_byte_reader_get_uint32_be_unchecked (&reader);
      parser->sidx.timescale =
          gst_byte_reader_get_uint32_be_unchecked (&reader);
      if (parser->sidx.version == 0) {
        parser->sidx.earliest_pts =
            gst_byte_reader_get_uint32_be_unchecked (&reader);
        parser->sidx.first_offset =
            gst_byte_reader_get_uint32_be_unchecked (&reader);
      } else {
        parser->sidx.earliest_pts =
            gst_byte_reader_get_uint64_be_unchecked (&reader);
        parser->sidx.first_offset =
            gst_byte_reader_get_uint64_be_unchecked (&reader);
      }
      /* skip 2 reserved bytes */
      gst_byte_reader_skip_unchecked (&reader, 2);
      parser->sidx.entries_count =
          gst_byte_reader_get_uint16_be_unchecked (&reader);

      GST_LOG ("Timescale: %" G_GUINT32_FORMAT, parser->sidx.timescale);
      GST_LOG ("Earliest pts: %" G_GUINT64_FORMAT, parser->sidx.earliest_pts);
      GST_LOG ("First offset: %" G_GUINT64_FORMAT, parser->sidx.first_offset);

      parser->cumulative_pts =
          gst_util_uint64_scale_int_round (parser->sidx.earliest_pts,
          GST_SECOND, parser->sidx.timescale);

      if (parser->sidx.entries_count) {
        parser->sidx.entries =
            g_malloc (sizeof (GstSidxBoxEntry) * parser->sidx.entries_count);
      }
      parser->sidx.entry_index = 0;

      parser->status = GST_ISOFF_SIDX_PARSER_DATA;
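      /* fall through */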

    case GST_ISOFF_SIDX_PARSER_DATA:
      while (parser->sidx.entry_index < parser->sidx.entries_count) {
        GstSidxBoxEntry *entry =
            &parser->sidx.entries[parser->sidx.entry_index];

        remaining = gst_byte_reader_get_remaining (&reader);
        if (remaining < 12)
          break;

        entry->offset = parser->cumulative_entry_size;
        entry->pts = parser->cumulative_pts;
        gst_isoff_parse_sidx_entry (entry, &reader);
        entry->duration = gst_util_uint64_scale_int_round (entry->duration,
            GST_SECOND, parser->sidx.timescale);
        parser->cumulative_entry_size += entry->size;
        parser->cumulative_pts += entry->duration;

        GST_LOG ("Sidx entry %d) offset: %" G_GUINT64_FORMAT ", pts: %"
            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT " - size %"
            G_GUINT32_FORMAT, parser->sidx.entry_index, entry->offset,
            GST_TIME_ARGS (entry->pts), GST_TIME_ARGS (entry->duration),
            entry->size);

        parser->sidx.entry_index++;
      }

      if (parser->sidx.entry_index == parser->sidx.entries_count)
        parser->status = GST_ISOFF_SIDX_PARSER_FINISHED;
      else
        break;
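      /* fall through once all entries have been parsed */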
    case GST_ISOFF_SIDX_PARSER_FINISHED:
      parser->sidx.entry_index = 0;
      res = GST_ISOFF_PARSER_DONE;
      break;
  }

  *consumed = gst_byte_reader_get_pos (&reader);
  gst_buffer_unmap (buffer, &info);
  return res;
}
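
/* Sketch of a caller driving the parser above (buffer handling is
 * illustrative): feed buffers until the parser reports done, using
 * *consumed to know how much of each buffer was actually parsed. */
static gboolean
example_feed_sidx (GstSidxParser * parser, GstBuffer * buffer)
{
  guint consumed = 0;
  GstIsoffParserResult res;

  res = gst_isoff_sidx_parser_add_buffer (parser, buffer, &consumed);
  /* bytes past 'consumed' were not parsed and must be resubmitted
   * together with the next input */
  return res == GST_ISOFF_PARSER_DONE;
}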
/* Delayed seek callback. This gets called by the timer setup in the above function. */
static gboolean delayed_seek_cb (CustomData *data) {
  GST_DEBUG ("Doing delayed seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
  execute_seek (data->desired_position, data);
  return FALSE;
}
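/* The timer mentioned above is not shown in this excerpt; it would be set up
 * along these lines (a sketch; the 500 ms delay is illustrative). */
static void example_schedule_delayed_seek (CustomData *data) {
  g_timeout_add (500, (GSourceFunc) delayed_seek_cb, data);
}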
static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
      if (!base_video_decoder->packetized)
        gst_base_video_decoder_drain (base_video_decoder, TRUE);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;
      GstSegment *segment = &base_video_decoder->segment;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (!update) {
        gst_base_video_decoder_flush (base_video_decoder);
      }

      base_video_decoder->timestamp_offset = start;

      gst_segment_set_newsegment_full (segment,
          update, rate, applied_rate, format, start, stop, position);
      base_video_decoder->have_segment = TRUE;

      GST_WARNING ("new segment: format %d rate %g start %" GST_TIME_FORMAT
          " stop %" GST_TIME_FORMAT
          " position %" GST_TIME_FORMAT
          " update %d",
          format, rate,
          GST_TIME_ARGS (segment->start),
          GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time), update);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;
    }

    case GST_EVENT_FLUSH_STOP:
      gst_base_video_decoder_flush (base_video_decoder);
      gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
          (base_video_decoder), event);
      break;

    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

done:
  gst_object_unref (base_video_decoder);
  return res;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_decoder, "received non-TIME newsegment");
  gst_event_unref (event);
  goto done;
}
static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:

      /* FIXME: do seek using bitrate incase upstream doesn't handle it */
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);

      break;

    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;
      GstClockTime duration;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
        if (G_UNLIKELY (diff > 0)) {
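          /* running late: everything before timestamp + 2 * diff plus one
           * frame duration may be dropped so decoding can catch up */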
          if (base_video_decoder->state.fps_n > 0)
            duration =
                gst_util_uint64_scale (GST_SECOND,
                base_video_decoder->state.fps_d,
                base_video_decoder->state.fps_n);
          else
            duration = 0;
          base_video_decoder->earliest_time = timestamp + 2 * diff + duration;
        } else {
          base_video_decoder->earliest_time = timestamp + diff;
        }
      } else {
        base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
      }
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
    }

    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SINK_PAD
          (base_video_decoder), event);
      break;
  }

  gst_object_unref (base_video_decoder);
  return res;
}
static GstFlowReturn
gst_mve_demux_chain (GstPad * sinkpad, GstBuffer * inbuf)
{
  GstMveDemux *mve = GST_MVE_DEMUX (GST_PAD_PARENT (sinkpad));
  GstFlowReturn ret = GST_FLOW_OK;

  gst_adapter_push (mve->adapter, inbuf);

  GST_DEBUG_OBJECT (mve, "queuing buffer, needed:%d, available:%u",
      mve->needed_bytes, gst_adapter_available (mve->adapter));

  while ((gst_adapter_available (mve->adapter) >= mve->needed_bytes) &&
      (ret == GST_FLOW_OK)) {
    GstMveDemuxStream *stream = NULL;
    GstBuffer *outbuf = NULL;

    switch (mve->state) {
      case MVEDEMUX_STATE_INITIAL:
        gst_adapter_flush (mve->adapter, mve->needed_bytes);

        mve->chunk_offset += mve->needed_bytes;
        mve->needed_bytes = 4;
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        break;

      case MVEDEMUX_STATE_NEXT_CHUNK:{
        const guint8 *data;
        guint16 size;

        data = gst_adapter_peek (mve->adapter, mve->needed_bytes);
        size = GST_MVE_SEGMENT_SIZE (data);

        if (mve->chunk_offset >= mve->chunk_size) {
          /* new chunk, flush buffer and proceed with next segment */
          guint16 chunk_type = GST_READ_UINT16_LE (data + 2);

          gst_adapter_flush (mve->adapter, mve->needed_bytes);
          mve->chunk_size = size;
          mve->chunk_offset = 0;

          if (chunk_type > MVE_CHUNK_END) {
            GST_WARNING_OBJECT (mve,
                "skipping unknown chunk type 0x%02x of size:%u", chunk_type,
                size);
            mve->needed_bytes += size;
            mve->state = MVEDEMUX_STATE_SKIP;
          } else {
            GST_DEBUG_OBJECT (mve, "found new chunk type 0x%02x of size:%u",
                chunk_type, size);
          }
        } else if (mve->chunk_offset <= mve->chunk_size) {
          /* new segment */
          GST_DEBUG_OBJECT (mve, "found segment type 0x%02x of size:%u",
              GST_MVE_SEGMENT_TYPE (data), size);

          mve->needed_bytes += size;
          mve->state = MVEDEMUX_STATE_MOVIE;
        }
      }
        break;

      case MVEDEMUX_STATE_MOVIE:
        ret = gst_mve_parse_segment (mve, &stream, &outbuf);

        if ((ret == GST_FLOW_OK) && (outbuf != NULL)) {
          /* send buffer */
          GST_DEBUG_OBJECT (mve,
              "pushing buffer with time %" GST_TIME_FORMAT
              " (%u bytes) on pad %s",
              GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
              GST_BUFFER_SIZE (outbuf), GST_PAD_NAME (stream->pad));

          ret = gst_pad_push (stream->pad, outbuf);
          stream->last_flow = ret;
        }

        if (ret == GST_FLOW_NOT_LINKED) {
          if (mve->audio_stream
              && mve->audio_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
          if (mve->video_stream
              && mve->video_stream->last_flow != GST_FLOW_NOT_LINKED)
            ret = GST_FLOW_OK;
        }

        /* update current offset */
        mve->chunk_offset += mve->needed_bytes;

        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      case MVEDEMUX_STATE_SKIP:
        mve->chunk_offset += mve->needed_bytes;
        gst_adapter_flush (mve->adapter, mve->needed_bytes);
        mve->state = MVEDEMUX_STATE_NEXT_CHUNK;
        mve->needed_bytes = 4;
        break;

      default:
        GST_ERROR_OBJECT (mve, "invalid state: %d", mve->state);
        break;
    }
  }

  return ret;
}
static gboolean
theora_parse_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstTheoraParse *parse;
  gboolean res = FALSE;

  parse = GST_THEORA_PARSE (parent);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      gint64 frame, value;
      GstFormat my_format, format;
      gint64 time;

      frame = parse->prev_frame;

      GST_LOG_OBJECT (parse,
          "query %p: we have current frame: %" G_GINT64_FORMAT, query, frame);

      /* parse format */
      gst_query_parse_position (query, &format, NULL);

      /* and convert to the final format in two steps with time as the 
       * intermediate step */
      my_format = GST_FORMAT_TIME;
      if (!(res =
              theora_parse_src_convert (parse->sinkpad, GST_FORMAT_DEFAULT,
                  frame, &my_format, &time)))
        goto error;

      /* fixme: handle segments
         time = (time - parse->segment.start) + parse->segment.time;
       */

      GST_LOG_OBJECT (parse,
          "query %p: our time: %" GST_TIME_FORMAT " (conv to %s)",
          query, GST_TIME_ARGS (time), gst_format_get_name (format));

      if (!(res =
              theora_parse_src_convert (pad, my_format, time, &format, &value)))
        goto error;

      gst_query_set_position (query, format, value);

      GST_LOG_OBJECT (parse,
          "query %p: we return %" G_GINT64_FORMAT " (format %u)", query, value,
          format);

      break;
    }
    case GST_QUERY_DURATION:
      /* forward to peer for total */
      if (!(res = gst_pad_query (GST_PAD_PEER (parse->sinkpad), query)))
        goto error;
      break;
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      if (!(res =
              theora_parse_src_convert (pad, src_fmt, src_val, &dest_fmt,
                  &dest_val)))
        goto error;

      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }
done:

  return res;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (parse, "query failed");
    goto done;
  }
}
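
/* Usage sketch (illustrative): querying the position handled above from
 * application code once the element's source pad is available. */
static void
example_query_position (GstPad * srcpad)
{
  GstQuery *q = gst_query_new_position (GST_FORMAT_TIME);

  if (gst_pad_query (srcpad, q)) {
    gint64 pos;

    gst_query_parse_position (q, NULL, &pos);
    g_print ("position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pos));
  }
  gst_query_unref (q);
}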
gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline, *filesrc, *decodebin;
  GstStateChangeReturn res;
  GstIterator *it;
  GstBus *bus;
  GValue data = { 0, };

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");

  filesrc = gst_element_factory_make ("filesrc", "filesrc");
  g_assert (filesrc);

  decodebin = gst_element_factory_make ("decodebin", "decodebin");
  g_assert (decodebin);

  gst_bin_add_many (GST_BIN (pipeline), filesrc, decodebin, NULL);
  gst_element_link (filesrc, decodebin);

  if (argc < 2) {
    g_print ("usage: %s <filenames>\n", argv[0]);
    exit (-1);
  }

  if (!g_str_has_prefix (argv[1], "file://")) {
    g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  } else {
    g_object_set (G_OBJECT (filesrc), "location", argv[1] + 7, NULL);
  }

  /* we've got to connect fakesinks to newly decoded pads to make sure
   * buffers have actually been flowing over those pads and caps have
   * been set on them. decodebin might insert internal queues and
   * without fakesinks it's pot-luck what caps we get from the pad, because
   * it depends on whether the queues have started pushing buffers yet or not.
   * With fakesinks we make sure that the pipeline doesn't go to PAUSED state
   * before each fakesink has a buffer queued. */
  g_signal_connect (decodebin, "new-decoded-pad",
      G_CALLBACK (new_decoded_pad_cb), pipeline);

  bus = gst_element_get_bus (pipeline);

  g_print ("pause..\n");
  res = gst_element_set_state (pipeline, GST_STATE_PAUSED);
  if (res == GST_STATE_CHANGE_FAILURE) {
    show_error ("Could not go to PAUSED state", bus);
    exit (-1);
  }
  g_print ("waiting..\n");
  res = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
  if (res != GST_STATE_CHANGE_SUCCESS) {
    show_error ("Failed to complete state change to PAUSED", bus);
    exit (-1);
  }
  g_print ("stats..\n");

  it = gst_element_iterate_src_pads (decodebin);
  while (gst_iterator_next (it, &data) == GST_ITERATOR_OK) {
    GstPad *pad = g_value_get_object (&data);
    GstCaps *caps;
    gchar *str;
    GstQuery *query;

    g_print ("stream %s:\n", GST_OBJECT_NAME (pad));

    caps = gst_pad_query_caps (pad, NULL);
    str = gst_caps_to_string (caps);
    g_print (" caps: %s\n", str);
    g_free (str);
    gst_caps_unref (caps);

    query = gst_query_new_duration (GST_FORMAT_TIME);
    if (gst_pad_query (pad, query)) {
      gint64 duration;

      gst_query_parse_duration (query, NULL, &duration);

      g_print (" duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (duration));
    }
    gst_query_unref (query);

    g_value_reset (&data);
  }
  g_value_unset (&data);
  gst_iterator_free (it);

  return 0;
}
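
/* new_decoded_pad_cb is referenced above but not part of this excerpt; a
 * minimal sketch (signature per decodebin's 0.10 "new-decoded-pad" signal,
 * details may vary by version) would link each new pad to a fakesink: */
static void
example_new_decoded_pad_cb (GstElement * decodebin, GstPad * pad,
    gboolean islast, GstElement * pipeline)
{
  GstElement *sink = gst_element_factory_make ("fakesink", NULL);
  GstPad *sinkpad;

  gst_bin_add (GST_BIN (pipeline), sink);
  sinkpad = gst_element_get_static_pad (sink, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
  gst_element_sync_state_with_parent (sink);
}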
static GstFlowReturn
gst_identity_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstIdentity *identity = GST_IDENTITY (trans);
  GstClockTime runtimestamp = G_GINT64_CONSTANT (0);

  if (identity->check_perfect)
    gst_identity_check_perfect (identity, buf);
  if (identity->check_imperfect_timestamp)
    gst_identity_check_imperfect_timestamp (identity, buf);
  if (identity->check_imperfect_offset)
    gst_identity_check_imperfect_offset (identity, buf);

  /* update prev values */
  identity->prev_timestamp = GST_BUFFER_TIMESTAMP (buf);
  identity->prev_duration = GST_BUFFER_DURATION (buf);
  identity->prev_offset_end = GST_BUFFER_OFFSET_END (buf);
  identity->prev_offset = GST_BUFFER_OFFSET (buf);

  if (identity->error_after >= 0) {
    identity->error_after--;
    if (identity->error_after == 0) {
      GST_ELEMENT_ERROR (identity, CORE, FAILED,
          (_("Failed after iterations as requested.")), (NULL));
      return GST_FLOW_ERROR;
    }
  }

  if (identity->drop_probability > 0.0) {
    if ((gfloat) (1.0 * rand () / (RAND_MAX)) < identity->drop_probability) {
      if (!identity->silent) {
        GST_OBJECT_LOCK (identity);
        g_free (identity->last_message);
        identity->last_message =
            g_strdup_printf
            ("dropping   ******* (%s:%s)i (%d bytes, timestamp: %"
            GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
            G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT
            ", flags: %d) %p", GST_DEBUG_PAD_NAME (trans->sinkpad),
            GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_OFFSET (buf),
            GST_BUFFER_OFFSET_END (buf), GST_BUFFER_FLAGS (buf), buf);
        GST_OBJECT_UNLOCK (identity);
        gst_identity_notify_last_message (identity);
      }
      /* return DROPPED to basetransform. */
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }
  }

  if (identity->dump) {
    gst_util_dump_mem (GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  }

  if (!identity->silent) {
    GST_OBJECT_LOCK (identity);
    g_free (identity->last_message);
    identity->last_message =
        g_strdup_printf ("chain   ******* (%s:%s)i (%d bytes, timestamp: %"
        GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
        G_GINT64_FORMAT ", offset_end: % " G_GINT64_FORMAT ", flags: %d) %p",
        GST_DEBUG_PAD_NAME (trans->sinkpad), GST_BUFFER_SIZE (buf),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
        GST_BUFFER_OFFSET (buf), GST_BUFFER_OFFSET_END (buf),
        GST_BUFFER_FLAGS (buf), buf);
    GST_OBJECT_UNLOCK (identity);
    gst_identity_notify_last_message (identity);
  }

  if (identity->datarate > 0) {
    GstClockTime time = gst_util_uint64_scale_int (identity->offset,
        GST_SECOND, identity->datarate);

    GST_BUFFER_TIMESTAMP (buf) = time;
    GST_BUFFER_DURATION (buf) =
        GST_BUFFER_SIZE (buf) * GST_SECOND / identity->datarate;
  }

  if (identity->signal_handoffs)
    g_signal_emit (identity, gst_identity_signals[SIGNAL_HANDOFF], 0, buf);

  if (trans->segment.format == GST_FORMAT_TIME)
    runtimestamp = gst_segment_to_running_time (&trans->segment,
        GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buf));

  if ((identity->sync) && (trans->segment.format == GST_FORMAT_TIME)) {
    GstClock *clock;

    GST_OBJECT_LOCK (identity);
    if ((clock = GST_ELEMENT (identity)->clock)) {
      GstClockReturn cret;
      GstClockTime timestamp;

      timestamp = runtimestamp + GST_ELEMENT (identity)->base_time;

      /* save id if we need to unlock */
      /* FIXME: actually unlock this somewhere in the state changes */
      identity->clock_id = gst_clock_new_single_shot_id (clock, timestamp);
      GST_OBJECT_UNLOCK (identity);

      cret = gst_clock_id_wait (identity->clock_id, NULL);

      GST_OBJECT_LOCK (identity);
      if (identity->clock_id) {
        gst_clock_id_unref (identity->clock_id);
        identity->clock_id = NULL;
      }
      if (cret == GST_CLOCK_UNSCHEDULED)
        ret = GST_FLOW_UNEXPECTED;
    }
    GST_OBJECT_UNLOCK (identity);
  }

  identity->offset += GST_BUFFER_SIZE (buf);

  if (identity->sleep_time && ret == GST_FLOW_OK)
    g_usleep (identity->sleep_time);

  if (identity->single_segment && (trans->segment.format == GST_FORMAT_TIME)
      && (ret == GST_FLOW_OK)) {
    GST_BUFFER_TIMESTAMP (buf) = runtimestamp;
    GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_OFFSET_END (buf) = GST_CLOCK_TIME_NONE;
  }

  return ret;
}
static GstFlowReturn
gst_base_rtp_audio_payload_handle_buffer (GstBaseRTPPayload *
    basepayload, GstBuffer * buffer)
{
  GstBaseRTPAudioPayload *payload;
  GstBaseRTPAudioPayloadPrivate *priv;
  guint payload_len;
  GstFlowReturn ret;
  guint available;
  guint min_payload_len;
  guint max_payload_len;
  guint align;
  guint size;
  gboolean discont;

  ret = GST_FLOW_OK;

  payload = GST_BASE_RTP_AUDIO_PAYLOAD_CAST (basepayload);
  priv = payload->priv;

  discont = GST_BUFFER_IS_DISCONT (buffer);
  if (discont) {
    GstClockTime timestamp;

    GST_DEBUG_OBJECT (payload, "Got DISCONT");
    /* flush everything out of the adapter, mark DISCONT */
    ret = gst_base_rtp_audio_payload_flush (payload, -1, -1);
    priv->discont = TRUE;

    timestamp = GST_BUFFER_TIMESTAMP (buffer);

    /* get the distance between the timestamp gap and produce the same gap in
     * the RTP timestamps */
    if (priv->last_timestamp != -1 && timestamp != -1) {
      /* we had a last timestamp, compare it to the new timestamp and update the
       * offset counter for RTP timestamps. The effect is that we will produce
       * output buffers containing the same RTP timestamp gap as the gap
       * between the GST timestamps. */
      if (timestamp > priv->last_timestamp) {
        GstClockTime diff;
        guint64 bytes;
        /* we're only going to apply a positive gap, otherwise we let the marker
         * bit do its thing. simply convert to bytes and add to the current
         * offset */
        diff = timestamp - priv->last_timestamp;
        bytes = priv->time_to_bytes (payload, diff);
        priv->offset += bytes;

        GST_DEBUG_OBJECT (payload,
            "elapsed time %" GST_TIME_FORMAT ", bytes %" G_GUINT64_FORMAT
            ", new offset %" G_GUINT64_FORMAT, GST_TIME_ARGS (diff), bytes,
            priv->offset);
      }
    }
  }

  if (!gst_base_rtp_audio_payload_get_lengths (basepayload, &min_payload_len,
          &max_payload_len, &align))
    goto config_error;

  GST_DEBUG_OBJECT (payload,
      "Calculated min_payload_len %u and max_payload_len %u",
      min_payload_len, max_payload_len);

  size = GST_BUFFER_SIZE (buffer);

  /* shortcut, we don't need to use the adapter when the packet can be pushed
   * through directly. */
  available = gst_adapter_available (priv->adapter);

  GST_DEBUG_OBJECT (payload, "got buffer size %u, available %u",
      size, available);

  if (available == 0 && (size >= min_payload_len && size <= max_payload_len) &&
      (size % align == 0)) {
    /* If buffer fits on an RTP packet, let's just push it through
     * this will check against max_ptime and max_mtu */
    GST_DEBUG_OBJECT (payload, "Fast packet push");
    ret = gst_base_rtp_audio_payload_push_buffer (payload, buffer);
  } else {
    /* push the buffer in the adapter */
    gst_adapter_push (priv->adapter, buffer);
    available += size;

    GST_DEBUG_OBJECT (payload, "available now %u", available);

    /* as long as we have full frames */
    while (available >= min_payload_len) {
      /* get multiple of alignment */
      payload_len = MIN (max_payload_len, available);
      payload_len = ALIGN_DOWN (payload_len, align);

      /* and flush out the bytes from the adapter, automatically set the
       * timestamp. */
      ret = gst_base_rtp_audio_payload_flush (payload, payload_len, -1);

      available -= payload_len;
      GST_DEBUG_OBJECT (payload, "available after push %u", available);
    }
  }
  return ret;

  /* ERRORS */
config_error:
  {
    GST_ELEMENT_ERROR (payload, STREAM, NOT_IMPLEMENTED, (NULL),
        ("subclass did not configure us properly"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
}
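
/* ALIGN_DOWN as used above is not defined in this excerpt; the usual
 * definition rounds a length down to a whole multiple of the alignment: */
#define ALIGN_DOWN(val, align) ((val) - ((val) % (align)))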
/* Reads in buffers, parses them, reframes into one-buffer-per-ogg-page, submits
 * pages to output pad.
 */
static GstFlowReturn
gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
{
  GstOggParse *ogg;
  GstFlowReturn result = GST_FLOW_OK;
  gint ret = -1;
  guint32 serialno;
  GstBuffer *pagebuffer;
  GstClockTime buffertimestamp = GST_BUFFER_TIMESTAMP (buffer);

  ogg = GST_OGG_PARSE (GST_OBJECT_PARENT (pad));

  GST_LOG_OBJECT (ogg, "Chain function received buffer of size %d",
      GST_BUFFER_SIZE (buffer));

  gst_ogg_parse_submit_buffer (ogg, buffer);

  while (ret != 0 && result == GST_FLOW_OK) {
    ogg_page page;

    /* We use ogg_sync_pageseek() rather than ogg_sync_pageout() so that we can
     * track how many bytes the ogg layer discarded (in the case of sync errors,
     * etc.); this allows us to accurately track the current stream offset
     */
    ret = ogg_sync_pageseek (&ogg->sync, &page);
    if (ret == 0) {
      /* need more data, that's fine... */
      break;
    } else if (ret < 0) {
      /* discontinuity; track how many bytes we skipped (-ret) */
      ogg->offset -= ret;
    } else {
      gint64 granule = ogg_page_granulepos (&page);
#ifndef GST_DISABLE_GST_DEBUG
      int bos = ogg_page_bos (&page);
#endif
      guint64 startoffset = ogg->offset;
      GstOggStream *stream;
      gboolean keyframe;

      serialno = ogg_page_serialno (&page);
      stream = gst_ogg_parse_find_stream (ogg, serialno);

      GST_LOG_OBJECT (ogg, "Timestamping outgoing buffer as %" GST_TIME_FORMAT,
          GST_TIME_ARGS (buffertimestamp));

      if (stream) {
        buffertimestamp = gst_ogg_stream_get_end_time_for_granulepos (stream,
            granule);
        if (ogg->video_stream) {
          if (stream == ogg->video_stream) {
            keyframe = gst_ogg_stream_granulepos_is_key_frame (stream, granule);
          } else {
            keyframe = FALSE;
          }
        } else {
          keyframe = TRUE;
        }
      } else {
        buffertimestamp = GST_CLOCK_TIME_NONE;
        keyframe = TRUE;
      }
      pagebuffer = gst_ogg_parse_buffer_from_page (&page, startoffset,
          buffertimestamp);

      /* We read out 'ret' bytes, so we set the next offset appropriately */
      ogg->offset += ret;

      GST_LOG_OBJECT (ogg,
          "processing ogg page (serial %08x, pageno %ld, "
          "granule pos %" G_GUINT64_FORMAT ", bos %d, offset %"
          G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT ") keyframe=%d",
          serialno, ogg_page_pageno (&page),
          granule, bos, startoffset, ogg->offset, keyframe);

      if (ogg_page_bos (&page)) {
        /* If we've seen this serialno before, this is technically an error,
         * we log this case but accept it - this one replaces the previous
         * stream with this serialno. We can do this since we're streaming, and
         * not supporting seeking...
         */
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (stream != NULL) {
          GST_LOG_OBJECT (ogg, "Incorrect stream; repeats serial number %u "
              "at offset %" G_GINT64_FORMAT, serialno, ogg->offset);
        }

        if (ogg->last_page_not_bos) {
          GST_LOG_OBJECT (ogg, "Deleting all referenced streams, found a new "
              "chain starting with serial %u", serialno);
          gst_ogg_parse_delete_all_streams (ogg);
        }

        stream = gst_ogg_parse_new_stream (ogg, &page);

        ogg->last_page_not_bos = FALSE;

        gst_buffer_ref (pagebuffer);
        stream->headers = g_list_append (stream->headers, pagebuffer);

        if (!ogg->in_headers) {
          GST_LOG_OBJECT (ogg,
              "Found start of new chain at offset %" G_GUINT64_FORMAT,
              startoffset);
          ogg->in_headers = 1;
        }

        /* For now, we just keep the header buffer in the stream->headers list;
         * it actually gets output once we've collected the entire set
         */
      } else {
        /* Non-BOS page. Either: we're outside headers, and this isn't a 
         * header (normal data), outside headers and this is (error!), inside
         * headers, this is (append header), or inside headers and this isn't 
         * (we've found the end of headers; flush the lot!)
         *
         * Before that, we flag that the last page seen (this one) was not a 
         * BOS page; that way we know that when we next see a BOS page it's a
         * new chain, and we can flush all existing streams.
         */
        page_type type;
        GstOggStream *stream = gst_ogg_parse_find_stream (ogg, serialno);

        if (!stream) {
          GST_LOG_OBJECT (ogg,
              "Non-BOS page unexpectedly found at %" G_GINT64_FORMAT,
              ogg->offset);
          goto failure;
        }

        ogg->last_page_not_bos = TRUE;

        type = gst_ogg_parse_is_header (ogg, stream, &page);

        if (type == PAGE_PENDING && ogg->in_headers) {
          gst_buffer_ref (pagebuffer);

          stream->unknown_pages = g_list_append (stream->unknown_pages,
              pagebuffer);
        } else if (type == PAGE_HEADER) {
          if (!ogg->in_headers) {
            GST_LOG_OBJECT (ogg, "Header page unexpectedly found outside "
                "headers at offset %" G_GINT64_FORMAT, ogg->offset);
            goto failure;
          } else {
            /* Append the header to the buffer list, after any unknown previous
             * pages
             */
            stream->headers = g_list_concat (stream->headers,
                stream->unknown_pages);
            g_list_free (stream->unknown_pages);
            gst_buffer_ref (pagebuffer);
            stream->headers = g_list_append (stream->headers, pagebuffer);
          }
        } else {                /* PAGE_DATA, or PAGE_PENDING but outside headers */
          if (ogg->in_headers) {
            /* First non-header page... set caps, flush headers.
             *
             * First up, we build a single GValue list of all the pagebuffers
             * we're using for the headers, in order.
             * Then we set this on the caps structure. Then we can start pushing
             * buffers for the headers, and finally we send this non-header
             * page.
             */
            GstCaps *caps;
            GstStructure *structure;
            GValue array = { 0 };
            gint count = 0;
            gboolean found_pending_headers = FALSE;
            GSList *l;

            g_value_init (&array, GST_TYPE_ARRAY);

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;

              if (g_list_length (stream->headers) == 0) {
                GST_LOG_OBJECT (ogg, "No primary header found for stream %08lx",
                    stream->serialno);
                goto failure;
              }

              gst_ogg_parse_append_header (&array,
                  GST_BUFFER (stream->headers->data));
              count++;
            }

            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* already appended the first header, now do headers 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                gst_ogg_parse_append_header (&array, GST_BUFFER (j->data));
                count++;
              }
            }

            caps = gst_pad_get_caps (ogg->srcpad);
            caps = gst_caps_make_writable (caps);

            structure = gst_caps_get_structure (caps, 0);
            gst_structure_set_value (structure, "streamheader", &array);

            gst_pad_set_caps (ogg->srcpad, caps);

            g_value_unset (&array);

            if (ogg->caps)
              gst_caps_unref (ogg->caps);
            ogg->caps = caps;

            GST_LOG_OBJECT (ogg, "Set \"streamheader\" caps with %d buffers "
                "(one per page)", count);

            /* Now, we do the same thing, but push buffers... */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GstBuffer *buf = GST_BUFFER (stream->headers->data);

              buf = gst_buffer_make_metadata_writable (buf);
              gst_buffer_set_caps (buf, caps);

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;
            }
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *j;

              /* pushed the first one for each stream already, now do 2-N */
              for (j = stream->headers->next; j != NULL; j = j->next) {
                GstBuffer *buf = GST_BUFFER (j->data);

                buf = gst_buffer_make_metadata_writable (buf);
                gst_buffer_set_caps (buf, caps);

                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
            }

            ogg->in_headers = 0;

            /* And finally the pending data pages */
            for (l = ogg->oggstreams; l != NULL; l = l->next) {
              GstOggStream *stream = (GstOggStream *) l->data;
              GList *k;

              if (stream->unknown_pages == NULL)
                continue;

              if (found_pending_headers) {
                GST_WARNING_OBJECT (ogg, "Incorrectly muxed headers found at "
                    "approximate offset %" G_GINT64_FORMAT, ogg->offset);
              }
              found_pending_headers = TRUE;

              GST_LOG_OBJECT (ogg, "Pushing %d pending pages after headers",
                  g_list_length (stream->unknown_pages) + 1);

              for (k = stream->unknown_pages; k != NULL; k = k->next) {
                GstBuffer *buf;

                buf = gst_buffer_make_metadata_writable (GST_BUFFER (k->data));
                gst_buffer_set_caps (buf, caps);
                result = gst_pad_push (ogg->srcpad, buf);
                if (result != GST_FLOW_OK)
                  return result;
              }
              g_list_foreach (stream->unknown_pages,
                  (GFunc) gst_mini_object_unref, NULL);
              g_list_free (stream->unknown_pages);
              stream->unknown_pages = NULL;
            }
          }

          if (granule == -1) {
            stream->stored_buffers = g_list_append (stream->stored_buffers,
                pagebuffer);
          } else {
            while (stream->stored_buffers) {
              GstBuffer *buf = stream->stored_buffers->data;

              buf = gst_buffer_make_metadata_writable (buf);
              gst_buffer_set_caps (buf, ogg->caps);
              GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;
              if (!keyframe) {
                GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
              } else {
                keyframe = FALSE;
              }

              result = gst_pad_push (ogg->srcpad, buf);
              if (result != GST_FLOW_OK)
                return result;

              stream->stored_buffers =
                  g_list_delete_link (stream->stored_buffers,
                  stream->stored_buffers);
            }

            pagebuffer = gst_buffer_make_metadata_writable (pagebuffer);
            gst_buffer_set_caps (pagebuffer, ogg->caps);
            if (!keyframe) {
              GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT);
            } else {
              keyframe = FALSE;
            }

            result = gst_pad_push (ogg->srcpad, pagebuffer);
            if (result != GST_FLOW_OK)
              return result;
          }
        }
      }
    }
  }

  return result;

failure:
  gst_pad_push_event (GST_PAD (ogg->srcpad), gst_event_new_eos ());
  return GST_FLOW_ERROR;
}
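
/* Sketch (illustrative) of how a downstream element could read back the
 * "streamheader" array that the parser sets on its caps above. */
static void
example_read_streamheaders (GstCaps * caps)
{
  const GstStructure *s = gst_caps_get_structure (caps, 0);
  const GValue *array = gst_structure_get_value (s, "streamheader");
  guint i, n = array ? gst_value_array_get_size (array) : 0;

  for (i = 0; i < n; i++) {
    const GValue *v = gst_value_array_get_value (array, i);
    GstBuffer *header = gst_value_get_buffer (v);

    /* each buffer holds one complete ogg header page */
    (void) header;
  }
}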
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstFlowReturn ret;

  GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  if (!base_video_decoder->have_segment) {
    GstEvent *event;
    GstFlowReturn ret;

    GST_WARNING
        ("Received buffer without a new-segment. Assuming timestamps start from 0.");

    gst_segment_set_newsegment_full (&base_video_decoder->segment,
        FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
    base_video_decoder->have_segment = TRUE;

    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
        GST_CLOCK_TIME_NONE, 0);

    ret =
        gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
        event);
    if (!ret) {
      GST_ERROR ("new segment event ret=%d", ret);
      return GST_FLOW_ERROR;
    }
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    gst_base_video_decoder_flush (base_video_decoder);
  }

  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
    gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
  }

  if (!base_video_decoder->current_frame)
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);

  if (base_video_decoder->packetized) {
    base_video_decoder->current_frame->sink_buffer = buf;

    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
  } else {

    gst_adapter_push (base_video_decoder->input_adapter, buf);

    ret = gst_base_video_decoder_drain (base_video_decoder, FALSE);
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
Example No. 17
static GstBuffer *
gst_rtp_dtmf_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  gint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  if (!gst_rtp_buffer_validate (buf))
    goto bad_packet;

  payload_len = gst_rtp_buffer_get_payload_len (buf);
  payload = gst_rtp_buffer_get_payload (buf);

  if (payload_len != sizeof (GstRTPDTMFPayload))
    goto bad_packet;

  memcpy (&dtmf_payload, payload, sizeof (GstRTPDTMFPayload));

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;


  marker = gst_rtp_buffer_get_marker (buf);

  timestamp = gst_rtp_buffer_get_timestamp (buf);

  dtmf_payload.duration = g_ntohs (dtmf_payload.duration);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_TIMESTAMP (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff  : %d - clock rate : %d - timestamp : %" G_GUINT64_FORMAT,
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* Ignore late or duplicate packets (like the redundant end packet) */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_buffer_new ();
    gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload, outbuf);


    GST_BUFFER_TIMESTAMP (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload,
        "timestamp : %" G_GUINT64_FORMAT " - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  return outbuf;


bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));
  return NULL;
}
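A note on the unit-time clipping above: with a clock-rate of 8000 Hz and a unit_time of 20 ms, one unit is 160 RTP clock ticks, so a reported duration of 250 is rounded up to 320. A minimal sketch of that arithmetic (round_to_unit_time is a hypothetical helper, not part of the element):

#include <glib.h>

/* Round a duration in RTP clock ticks up to a whole number of
 * unit_time milliseconds, rounding down instead when rounding up
 * would overflow the 16-bit duration field. */
static guint16
round_to_unit_time (guint16 duration, guint unit_time_ms, guint clock_rate)
{
  guint unit_ticks = (unit_time_ms * clock_rate) / 1000;
  guint rem;

  if (unit_ticks == 0 || (rem = duration % unit_ticks) == 0)
    return duration;

  if (duration < G_MAXUINT16 - unit_ticks)
    return duration + (unit_ticks - rem);
  else
    return duration - rem;
}

/* round_to_unit_time (250, 20, 8000) == 320, since one unit is 160 ticks */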
Example No. 18
GstFlowReturn
gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;

  GstClockTime presentation_timestamp;
  GstClockTime presentation_duration;

  GstBuffer *src_buffer;

  GST_DEBUG ("finish frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);


  if (!gst_base_video_decoder_set_src_caps (base_video_decoder))
    return GST_FLOW_NOT_NEGOTIATED;

  gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame,
      &presentation_timestamp, &presentation_duration);

  src_buffer = frame->src_buffer;

  GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if (base_video_decoder->state.interlaced) {
#ifndef GST_VIDEO_BUFFER_TFF
#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
#endif
#ifndef GST_VIDEO_BUFFER_RFF
#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
#endif
#ifndef GST_VIDEO_BUFFER_ONEFIELD
#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
#endif

    if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF)) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
    }
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    if (frame->n_fields == 3) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
    } else if (frame->n_fields == 1) {
      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
    }
  }
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_UNSET (src_buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  GST_BUFFER_TIMESTAMP (src_buffer) = presentation_timestamp;
  GST_BUFFER_DURATION (src_buffer) = presentation_duration;
  GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE;
  GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE;

  GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
      GST_TIME_ARGS (presentation_timestamp));

  if (base_video_decoder->sink_clipping) {
    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
        GST_BUFFER_DURATION (src_buffer);

    if (gst_segment_clip (&base_video_decoder->segment, GST_FORMAT_TIME,
            start, stop, &start, &stop)) {
      GST_BUFFER_TIMESTAMP (src_buffer) = start;
      GST_BUFFER_DURATION (src_buffer) = stop - start;
      GST_DEBUG ("accepting buffer inside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
    } else {
      GST_DEBUG ("dropping buffer outside segment: %" GST_TIME_FORMAT
          " %" GST_TIME_FORMAT
          " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
          " time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer) +
              GST_BUFFER_DURATION (src_buffer)),
          GST_TIME_ARGS (base_video_decoder->segment.start),
          GST_TIME_ARGS (base_video_decoder->segment.stop),
          GST_TIME_ARGS (base_video_decoder->segment.time));
      gst_video_frame_unref (frame);
      return GST_FLOW_OK;
    }
  }

  gst_buffer_ref (src_buffer);
  gst_video_frame_unref (frame);

  if (base_video_decoder_class->shape_output)
    return base_video_decoder_class->shape_output (base_video_decoder,
        src_buffer);

  return gst_pad_push (GST_BASE_VIDEO_DECODER_SRC_PAD (base_video_decoder),
      src_buffer);
}
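The clipping step above keeps a buffer only if [start, stop) overlaps the configured segment and shrinks it to the intersection. A quick sketch of the call's semantics against the 0.10 API (clip_to_segment is a hypothetical wrapper):

#include <gst/gst.h>

/* Returns TRUE and the clipped range when the buffer overlaps the
 * segment; FALSE means the buffer lies entirely outside and is dropped. */
static gboolean
clip_to_segment (GstSegment * segment, GstClockTime ts, GstClockTime dur,
    gint64 * clipped_ts, gint64 * clipped_dur)
{
  gint64 start = ts, stop = ts + dur;

  if (!gst_segment_clip (segment, GST_FORMAT_TIME, start, stop, &start, &stop))
    return FALSE;

  *clipped_ts = start;
  *clipped_dur = stop - start;
  return TRUE;
}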
Example No. 19
HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
{
  GstBuffer *out_buf = NULL;
  gboolean in_seg = FALSE;
  GstClockTime buf_start, buf_stop;
  gint64 clip_start = 0, clip_stop = 0;
  guint start_offset = 0, stop_offset;
  GstClockTime duration;

  if(pMediaSample)
  {
    BYTE *pBuffer = NULL;
    LONGLONG lStart = 0, lStop = 0;
    long size = pMediaSample->GetActualDataLength();

    pMediaSample->GetPointer(&pBuffer);
    pMediaSample->GetTime(&lStart, &lStop);
    
    if (!GST_CLOCK_TIME_IS_VALID (mDec->timestamp)) {
      // Convert REFERENCE_TIME to GST_CLOCK_TIME
      mDec->timestamp = (GstClockTime)lStart * 100;
    }
    duration = (lStop - lStart) * 100;

    buf_start = mDec->timestamp;
    buf_stop = mDec->timestamp + duration;

    /* save stop position to start next buffer with it */
    mDec->timestamp = buf_stop;

    /* check if this buffer is in our current segment */
    in_seg = gst_segment_clip (mDec->segment, GST_FORMAT_TIME,
        buf_start, buf_stop, &clip_start, &clip_stop);

    /* if the buffer is out of segment do not push it downstream */
    if (!in_seg) {
      GST_DEBUG_OBJECT (mDec,
          "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
          GST_TIME_FORMAT, GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_stop));
      goto done;
    }

    /* buffer is entirely or partially in-segment, so allocate a
     * GstBuffer for output, and clip if required */

    /* allocate a new buffer for raw audio */
    mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad, 
        GST_BUFFER_OFFSET_NONE,
        size,
        GST_PAD_CAPS (mDec->srcpad), &out_buf);
    if (!out_buf) {
      GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer");
      goto done;
    }

    /* set buffer properties */
    GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
    GST_BUFFER_DURATION (out_buf) = duration;
    memcpy (GST_BUFFER_DATA (out_buf), pBuffer,
        MIN ((unsigned int)size, GST_BUFFER_SIZE (out_buf)));

    /* we have to remove some heading samples */
    if ((GstClockTime) clip_start > buf_start) {
      start_offset = (guint)gst_util_uint64_scale_int (clip_start - buf_start,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      start_offset = 0;
    /* we have to remove some trailing samples */
    if ((GstClockTime) clip_stop < buf_stop) {
      stop_offset = (guint)gst_util_uint64_scale_int (buf_stop - clip_stop,
          mDec->rate, GST_SECOND) * mDec->depth / 8 * mDec->channels;
    }
    else
      stop_offset = size;

    /* truncating */
    if ((start_offset != 0) || (stop_offset != (size_t) size)) {
      GstBuffer *subbuf = gst_buffer_create_sub (out_buf, start_offset,
          stop_offset - start_offset);

      if (subbuf) {
        gst_buffer_set_caps (subbuf, GST_PAD_CAPS (mDec->srcpad));
        gst_buffer_unref (out_buf);
        out_buf = subbuf;
      }
    }

    GST_BUFFER_TIMESTAMP (out_buf) = clip_start;
    GST_BUFFER_DURATION (out_buf) = clip_stop - clip_start;

    /* replace the saved stop position by the clipped one */
    mDec->timestamp = clip_stop;

    GST_DEBUG_OBJECT (mDec,
        "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
        " duration %" GST_TIME_FORMAT, size,
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf)),
        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf) +
            GST_BUFFER_DURATION (out_buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (out_buf)));

    mDec->last_ret = gst_pad_push (mDec->srcpad, out_buf);
  }

done:
  return S_OK;
}
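Two unit conversions above are easy to get wrong: DirectShow REFERENCE_TIME is in 100 ns units, hence the multiply by 100 to obtain a GstClockTime in nanoseconds, and a clipped time span has to become a byte offset via the sample rate and frame size. A compact sketch (names hypothetical):

#include <gst/gst.h>

/* REFERENCE_TIME (100 ns units) -> GstClockTime (ns) */
#define REFTIME_TO_GST(t) ((GstClockTime) (t) * 100)

/* time span (ns) -> byte offset, as in the start/stop_offset math above */
static guint
time_to_byte_offset (GstClockTime span, gint rate, gint depth, gint channels)
{
  /* sample frames in the span, times bytes per sample frame */
  return (guint) gst_util_uint64_scale_int (span, rate, GST_SECOND)
      * (depth / 8) * channels;
}

/* e.g. 10 ms at 44100 Hz, 16-bit stereo:
 * 441 sample frames * 4 bytes = 1764 bytes */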
Example No. 20
static gboolean
gst_base_video_parse_src_query (GstPad * pad, GstQuery * query)
{
  GstBaseVideoParse *base_parse;
  gboolean res = FALSE;

  base_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 time;
      gint64 value;

      gst_query_parse_position (query, &format, NULL);

      /* convert frame count to time: n * fps_d * GST_SECOND / fps_n */
      time = gst_util_uint64_scale (base_parse->presentation_frame_number,
          base_parse->state.fps_d * GST_SECOND, base_parse->state.fps_n);
      time += base_parse->segment.time;
      GST_DEBUG ("query position %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
      res = gst_base_video_encoded_video_convert (&base_parse->state,
          GST_FORMAT_TIME, time, &format, &value);
      if (!res)
        goto error;

      gst_query_set_position (query, format, value);
      break;
    }
    case GST_QUERY_DURATION:
      res =
          gst_pad_query (GST_PAD_PEER (GST_BASE_VIDEO_CODEC_SINK_PAD
              (base_parse)), query);
      if (!res)
        goto error;
      break;
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      GST_WARNING ("query convert");

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      res = gst_base_video_encoded_video_convert (&base_parse->state,
          src_fmt, src_val, &dest_fmt, &dest_val);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
done:
  gst_object_unref (base_parse);

  return res;
error:
  GST_DEBUG_OBJECT (base_parse, "query failed");
  goto done;
}
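For context, the CONVERT branch above serves queries such as the following, which a downstream element or an application might issue to map a byte offset to a timestamp; a sketch against the 0.10 query API (bytes_to_time is a hypothetical helper):

#include <gst/gst.h>

/* Ask a pad (assumed to be the parser's source pad) to convert a
 * byte offset into a time value. */
static gboolean
bytes_to_time (GstPad * pad, gint64 bytes, gint64 * time_out)
{
  GstQuery *query;
  GstFormat dest_fmt = GST_FORMAT_TIME;
  gboolean res;

  query = gst_query_new_convert (GST_FORMAT_BYTES, bytes, dest_fmt);
  res = gst_pad_query (pad, query);
  if (res)
    gst_query_parse_convert (query, NULL, NULL, &dest_fmt, time_out);
  gst_query_unref (query);

  return res;
}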
Example No. 21
static GstFlowReturn
gst_base_rtp_depayload_chain (GstPad * pad, GstBuffer * in)
{
  GstBaseRTPDepayload *filter;
  GstBaseRTPDepayloadPrivate *priv;
  GstBaseRTPDepayloadClass *bclass;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *out_buf;
  GstClockTime timestamp;
  guint16 seqnum;
  guint32 rtptime;
  gboolean reset_seq, discont;
  gint gap;

  filter = GST_BASE_RTP_DEPAYLOAD (GST_OBJECT_PARENT (pad));
  priv = filter->priv;

  /* we must have a setcaps first */
  if (G_UNLIKELY (!priv->negotiated))
    goto not_negotiated;

  /* we must validate, it's possible that this element is plugged right after a
   * network receiver and we don't want to operate on invalid data */
  if (G_UNLIKELY (!gst_rtp_buffer_validate (in)))
    goto invalid_buffer;

  priv->discont = GST_BUFFER_IS_DISCONT (in);

  timestamp = GST_BUFFER_TIMESTAMP (in);
  /* convert to running_time and save the timestamp, this is the timestamp
   * we put on outgoing buffers. */
  timestamp = gst_segment_to_running_time (&filter->segment, GST_FORMAT_TIME,
      timestamp);
  priv->timestamp = timestamp;
  priv->duration = GST_BUFFER_DURATION (in);

  seqnum = gst_rtp_buffer_get_seq (in);
  rtptime = gst_rtp_buffer_get_timestamp (in);
  reset_seq = TRUE;
  discont = FALSE;

  GST_LOG_OBJECT (filter, "discont %d, seqnum %u, rtptime %u, timestamp %"
      GST_TIME_FORMAT, priv->discont, seqnum, rtptime,
      GST_TIME_ARGS (timestamp));

  /* Check seqnum. This is a very simple check that makes sure that the seqnums
   * are strictly increasing, dropping anything that is out of the ordinary. We
   * can only do this when the next_seqnum is known. */
  if (G_LIKELY (priv->next_seqnum != -1)) {
    gap = gst_rtp_buffer_compare_seqnum (seqnum, priv->next_seqnum);

    /* if we have no gap, all is fine */
    if (G_UNLIKELY (gap != 0)) {
      GST_LOG_OBJECT (filter, "got packet %u, expected %u, gap %d", seqnum,
          priv->next_seqnum, gap);
      if (gap < 0) {
        /* seqnum > next_seqnum, we are missing some packets, this is always a
         * DISCONT. */
        GST_LOG_OBJECT (filter, "%d missing packets", gap);
        discont = TRUE;
      } else {
        /* seqnum < next_seqnum, we have seen this packet before or the sender
         * could be restarted. If the packet is not too old, we throw it away as
         * a duplicate, otherwise we mark discont and continue. 100 misordered
         * packets is a good threshold. See also RFC 4737. */
        if (gap < 100)
          goto dropping;

        GST_LOG_OBJECT (filter,
            "%d > 100, packet too old, sender likely restarted", gap);
        discont = TRUE;
      }
    }
  }
  priv->next_seqnum = (seqnum + 1) & 0xffff;

  if (G_UNLIKELY (discont && !priv->discont)) {
    GST_LOG_OBJECT (filter, "mark DISCONT on input buffer");
    /* we detected a seqnum discont but the buffer was not flagged with a discont,
     * set the discont flag so that the subclass can throw away old data. */
    priv->discont = TRUE;
    GST_BUFFER_FLAG_SET (in, GST_BUFFER_FLAG_DISCONT);
  }

  bclass = GST_BASE_RTP_DEPAYLOAD_GET_CLASS (filter);

  if (G_UNLIKELY (bclass->process == NULL))
    goto no_process;

  /* let's send it out to processing */
  out_buf = bclass->process (filter, in);
  if (out_buf) {
    /* we pass rtptime for backward compatibility; in reality, the incoming
     * buffer timestamp is always applied to the outgoing packet. */
    ret = gst_base_rtp_depayload_push_ts (filter, rtptime, out_buf);
  }
  gst_buffer_unref (in);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
        ("Not RTP format was negotiated"));
    gst_buffer_unref (in);
    return GST_FLOW_NOT_NEGOTIATED;
  }
invalid_buffer:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_WARNING (filter, STREAM, DECODE, (NULL),
        ("Received invalid RTP payload, dropping"));
    gst_buffer_unref (in);
    return GST_FLOW_OK;
  }
dropping:
  {
    GST_WARNING_OBJECT (filter, "%d < 100, dropping old packet", gap);
    gst_buffer_unref (in);
    return GST_FLOW_OK;
  }
no_process:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_ERROR (filter, STREAM, NOT_IMPLEMENTED, (NULL),
        ("The subclass does not have a process method"));
    gst_buffer_unref (in);
    return GST_FLOW_ERROR;
  }
}
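The seqnum check above leans on gst_rtp_buffer_compare_seqnum handling 16-bit wrap-around: 65535 followed by 0 is a forward gap of 1, not -65535. A sketch of equivalent logic for distances within ±32768 (this is not the library source):

#include <glib.h>

/* Wrap-safe 16-bit seqnum distance: positive when s2 is ahead of s1,
 * negative when it lags, via two's-complement truncation. */
static gint
compare_seqnum (guint16 s1, guint16 s2)
{
  return (gint16) (s2 - s1);
}

/* compare_seqnum (65535, 0) ==  1   (next packet after the wrap)
 * compare_seqnum (10, 5)    == -5   (old packet)                 */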
Example No. 22
static gboolean
gst_base_video_parse_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoParse *base_video_parse;
  gboolean res = FALSE;

  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
          format, cur, &tformat, &tcur);
      if (!res)
        goto convert_error;
      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
          format, stop, &tformat, &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
          real_seek);

      break;
    }
#if 0
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      GST_OBJECT_LOCK (base_video_parse);
      base_video_parse->proportion = proportion;
      base_video_parse->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_parse);

      GST_DEBUG_OBJECT (base_video_parse,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
          GST_TIME_ARGS (timestamp), diff);

      res = gst_pad_push_event (base_video_parse->sinkpad, event);
      break;
    }
#endif
    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
          event);
      break;
  }
done:
  gst_object_unref (base_video_parse);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_parse, "could not convert format");
  goto done;
}
Example No. 23
/**
 * This function will push out buffers on the source pad.
 *
 * For each pushed buffer, the seqnum is recorded, if the next buffer B has a
 * different seqnum (missing packets before B), this function will wait for the
 * missing packet to arrive up to the timestamp of buffer B.
 */
static void
gst_rtp_jitter_buffer_loop (GstRtpJitterBuffer * jitterbuffer)
{
  GstRtpJitterBufferPrivate *priv;
  GstBuffer *outbuf;
  GstFlowReturn result;
  guint16 seqnum;
  guint32 next_seqnum;
  GstClockTime timestamp, out_time;
  gboolean discont = FALSE;
  gint gap;

  priv = jitterbuffer->priv;

  JBUF_LOCK_CHECK (priv, flushing);
again:
  GST_DEBUG_OBJECT (jitterbuffer, "Peeking item");
  while (TRUE) {
    /* always wait if we are blocked */
    if (!priv->blocked) {
      /* if we have a packet, we can exit the loop and grab it */
      if (rtp_jitter_buffer_num_packets (priv->jbuf) > 0)
        break;
      /* no packets but we are EOS, do eos logic */
      if (priv->eos)
        goto do_eos;
    }
    /* underrun, wait for packets or flushing now */
    priv->waiting = TRUE;
    JBUF_WAIT_CHECK (priv, flushing);
    priv->waiting = FALSE;
  }

  /* peek a buffer, we're just looking at the timestamp and the sequence number.
   * If all is fine, we'll pop and push it. If the sequence number is wrong we
   * wait on the timestamp. In the chain function we will unlock the wait when a
   * new buffer is available. The peeked buffer is valid for as long as we hold
   * the jitterbuffer lock. */
  outbuf = rtp_jitter_buffer_peek (priv->jbuf);

  /* get the seqnum and the next expected seqnum */
  seqnum = gst_rtp_buffer_get_seq (outbuf);
  next_seqnum = priv->next_seqnum;

  /* get the timestamp, this is already corrected for clock skew by the
   * jitterbuffer */
  timestamp = GST_BUFFER_TIMESTAMP (outbuf);

  GST_DEBUG_OBJECT (jitterbuffer,
      "Peeked buffer #%d, expect #%d, timestamp %" GST_TIME_FORMAT
      ", now %d left", seqnum, next_seqnum, GST_TIME_ARGS (timestamp),
      rtp_jitter_buffer_num_packets (priv->jbuf));

  /* apply our timestamp offset to the incoming buffer; this will be our output
   * timestamp. */
  out_time = apply_offset (jitterbuffer, timestamp);

  /* get the gap between this and the previous packet. If we don't know the
   * previous packet seqnum assume no gap. */
  if (next_seqnum != -1) {
    gap = gst_rtp_buffer_compare_seqnum (next_seqnum, seqnum);

    /* if we have a packet that we already pushed or considered dropped, pop it
     * off and get the next packet */
    if (gap < 0) {
      GST_DEBUG_OBJECT (jitterbuffer, "Old packet #%d, next #%d dropping",
          seqnum, next_seqnum);
      outbuf = rtp_jitter_buffer_pop (priv->jbuf);
      gst_buffer_unref (outbuf);
      goto again;
    }
  } else {
    GST_DEBUG_OBJECT (jitterbuffer, "no next seqnum known, first packet");
    gap = -1;
  }

  /* If we don't know what the next seqnum should be (== -1) we have to wait
   * because it might be possible that we are not receiving this buffer in-order,
   * a buffer with a lower seqnum could arrive later and we want to push that
   * earlier buffer before this buffer then.
   * If we know the expected seqnum, we can compare it to the current seqnum to
   * determine if we are missing a packet. If we are missing a packet (which
   * must be before this packet) we can wait for it until the deadline for this
   * packet expires. */
  if (gap != 0 && out_time != -1) {
    GstClockID id;
    GstClockTime sync_time;
    GstClockReturn ret;
    GstClock *clock;
    GstClockTime duration = GST_CLOCK_TIME_NONE;

    if (gap > 0) {
      /* we have a gap */
      GST_WARNING_OBJECT (jitterbuffer,
          "Sequence number GAP detected: expected %d instead of %d (%d missing)",
          next_seqnum, seqnum, gap);

      if (priv->last_out_time != -1) {
        GST_DEBUG_OBJECT (jitterbuffer,
            "out_time %" GST_TIME_FORMAT ", last %" GST_TIME_FORMAT,
            GST_TIME_ARGS (out_time), GST_TIME_ARGS (priv->last_out_time));
        /* interpolate between the current time and the last time based on
         * number of packets we are missing, this is the estimated duration
         * for the missing packet based on equidistant packet spacing. Also make
         * sure we never go negative. */
        if (out_time > priv->last_out_time)
          duration = (out_time - priv->last_out_time) / (gap + 1);
        else
          goto lost;

        GST_DEBUG_OBJECT (jitterbuffer, "duration %" GST_TIME_FORMAT,
            GST_TIME_ARGS (duration));
        /* add this duration to the timestamp of the last packet we pushed */
        out_time = (priv->last_out_time + duration);
      }
    } else {
      /* we don't know what the next_seqnum should be, wait for the last
       * possible moment to push this buffer, maybe we get an earlier seqnum
       * while we wait */
      GST_DEBUG_OBJECT (jitterbuffer, "First buffer %d, do sync", seqnum);
    }

    GST_OBJECT_LOCK (jitterbuffer);
    clock = GST_ELEMENT_CLOCK (jitterbuffer);
    if (!clock) {
      GST_OBJECT_UNLOCK (jitterbuffer);
      /* let's just push if there is no clock */
      goto push_buffer;
    }

    GST_DEBUG_OBJECT (jitterbuffer, "sync to timestamp %" GST_TIME_FORMAT,
        GST_TIME_ARGS (out_time));

    /* prepare for sync against clock */
    sync_time = out_time + GST_ELEMENT_CAST (jitterbuffer)->base_time;
    /* add latency, this includes our own latency and the peer latency. */
    sync_time += (priv->latency_ms * GST_MSECOND);
    sync_time += priv->peer_latency;

    /* create an entry for the clock */
    id = priv->clock_id = gst_clock_new_single_shot_id (clock, sync_time);
    GST_OBJECT_UNLOCK (jitterbuffer);

    /* release the lock so that the other end can push stuff or unlock */
    JBUF_UNLOCK (priv);

    ret = gst_clock_id_wait (id, NULL);

    JBUF_LOCK (priv);
    /* and free the entry */
    gst_clock_id_unref (id);
    priv->clock_id = NULL;

    /* at this point, the clock could have been unlocked by a timeout, a new
     * tail element was added to the queue or because we are shutting down. Check
     * for shutdown first. */
    if (priv->srcresult != GST_FLOW_OK)
      goto flushing;

    /* if we got unscheduled and we are not flushing, it's because a new tail
     * element became available in the queue. Grab it and try to push or sync. */
    if (ret == GST_CLOCK_UNSCHEDULED) {
      GST_DEBUG_OBJECT (jitterbuffer,
          "Wait got unscheduled, will retry to push with new buffer");
      goto again;
    }

  lost:
    /* we now timed out, this means we lost a packet or finished synchronizing
     * on the first buffer. */
    if (gap > 0) {
      GstEvent *event;

      /* we had a gap and thus we lost a packet. Create an event for this.  */
      GST_DEBUG_OBJECT (jitterbuffer, "Packet #%d lost", next_seqnum);
      priv->num_late++;
      discont = TRUE;

      if (priv->do_lost) {
        /* create packet-lost event */
        event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
            gst_structure_new ("GstRTPPacketLost",
                "seqnum", G_TYPE_UINT, (guint) next_seqnum,
                "timestamp", G_TYPE_UINT64, out_time,
                "duration", G_TYPE_UINT64, duration, NULL));
        gst_pad_push_event (priv->srcpad, event);
      }

      /* update our expected next packet */
      priv->last_popped_seqnum = next_seqnum;
      priv->last_out_time = out_time;
      priv->next_seqnum = (next_seqnum + 1) & 0xffff;
      /* look for next packet */
      goto again;
    }

    /* there was no known gap, just the first packet; exit the loop and push */
    GST_DEBUG_OBJECT (jitterbuffer, "First packet #%d synced", seqnum);

    /* get new timestamp, latency might have changed */
    out_time = apply_offset (jitterbuffer, timestamp);
  }
push_buffer:

  /* when we get here we are ready to pop and push the buffer */
  outbuf = rtp_jitter_buffer_pop (priv->jbuf);

  if (discont || priv->discont) {
    /* set DISCONT flag when we missed a packet. */
    outbuf = gst_buffer_make_metadata_writable (outbuf);
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    priv->discont = FALSE;
  }

  /* apply timestamp with offset to buffer now */
  GST_BUFFER_TIMESTAMP (outbuf) = out_time;

  /* now we are ready to push the buffer. Save the seqnum and release the lock
   * so the other end can push stuff in the queue again. */
  priv->last_popped_seqnum = seqnum;
  priv->last_out_time = out_time;
  priv->next_seqnum = (seqnum + 1) & 0xffff;
  JBUF_UNLOCK (priv);

  /* push buffer */
  GST_DEBUG_OBJECT (jitterbuffer,
      "Pushing buffer %d, timestamp %" GST_TIME_FORMAT, seqnum,
      GST_TIME_ARGS (out_time));
  result = gst_pad_push (priv->srcpad, outbuf);
  if (result != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
do_eos:
  {
    /* store result, we are flushing now */
    GST_DEBUG_OBJECT (jitterbuffer, "We are EOS, pushing EOS downstream");
    priv->srcresult = GST_FLOW_UNEXPECTED;
    gst_pad_pause_task (priv->srcpad);
    gst_pad_push_event (priv->srcpad, gst_event_new_eos ());
    JBUF_UNLOCK (priv);
    return;
  }
flushing:
  {
    GST_DEBUG_OBJECT (jitterbuffer, "we are flushing");
    gst_pad_pause_task (priv->srcpad);
    JBUF_UNLOCK (priv);
    return;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (result);

    GST_DEBUG_OBJECT (jitterbuffer, "pausing task, reason %s", reason);

    JBUF_LOCK (priv);
    /* store result */
    priv->srcresult = result;
    /* we don't post errors or anything because upstream will do that for us
     * when we pass the return value upstream. */
    gst_pad_pause_task (priv->srcpad);
    JBUF_UNLOCK (priv);
    return;
  }
}
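The lost-packet estimate above assumes equidistant packet spacing: if the last pushed buffer left at 100 ms, the peeked one is due at 160 ms and gap == 2, each missing packet is assigned (160 - 100) / 3 = 20 ms, so the first lost packet is timed at 120 ms. A worked sketch of that estimate and of the sync deadline the loop waits for (names hypothetical):

#include <gst/gst.h>

/* Estimated timestamp of the first lost packet, assuming equidistant
 * spacing between the last pushed and the peeked buffer. */
static GstClockTime
estimate_lost_time (GstClockTime last_out_time, GstClockTime out_time, gint gap)
{
  GstClockTime duration = (out_time - last_out_time) / (gap + 1);
  return last_out_time + duration;
}

/* Deadline before declaring the packet lost: running time plus the
 * element base time plus configured and peer latency. */
static GstClockTime
lost_deadline (GstClockTime out_time, GstClockTime base_time,
    guint latency_ms, GstClockTime peer_latency)
{
  return out_time + base_time + latency_ms * GST_MSECOND + peer_latency;
}

/* estimate_lost_time (100 * GST_MSECOND, 160 * GST_MSECOND, 2)
 *   == 120 * GST_MSECOND */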
Example No. 24
static gboolean
gst_base_video_parse_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoParse *base_video_parse;
  gboolean ret = FALSE;

  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_FLUSH_STOP:
      gst_base_video_parse_reset (base_video_parse);
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_EOS:
      if (gst_base_video_parse_push_all (base_video_parse,
              FALSE) == GST_FLOW_ERROR) {
        gst_event_unref (event);
        return FALSE;
      }

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      GstFormat format;
      gdouble rate;
      gint64 start, stop, time;

      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
          &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (rate <= 0.0)
        goto newseg_wrong_rate;

      GST_DEBUG ("newsegment %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
          GST_TIME_ARGS (start), GST_TIME_ARGS (time));
      gst_segment_set_newsegment (&base_video_parse->segment, update,
          rate, format, start, stop, time);

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
    }
    default:
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
          event);
      break;
  }
done:
  gst_object_unref (base_video_parse);
  return ret;

newseg_wrong_format:
  GST_DEBUG_OBJECT (base_video_parse, "received non TIME newsegment");
  gst_event_unref (event);
  goto done;

newseg_wrong_rate:
  GST_DEBUG_OBJECT (base_video_parse, "negative rates not supported");
  gst_event_unref (event);
  goto done;
}
Example No. 25
static GstFlowReturn
gst_rtp_jitter_buffer_chain (GstPad * pad, GstBuffer * buffer)
{
  GstRtpJitterBuffer *jitterbuffer;
  GstRtpJitterBufferPrivate *priv;
  guint16 seqnum;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime timestamp;
  guint64 latency_ts;
  gboolean tail;

  jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));

  if (!gst_rtp_buffer_validate (buffer))
    goto invalid_buffer;

  priv = jitterbuffer->priv;

  if (priv->last_pt != gst_rtp_buffer_get_payload_type (buffer)) {
    GstCaps *caps;

    priv->last_pt = gst_rtp_buffer_get_payload_type (buffer);
    /* reset clock-rate so that we get a new one */
    priv->clock_rate = -1;
    /* Try to get the clock-rate from the caps first if we can. If there are no
     * caps we must fire the signal to get the clock-rate. */
    if ((caps = GST_BUFFER_CAPS (buffer))) {
      gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps);
    }
  }

  if (priv->clock_rate == -1) {
    guint8 pt;

    /* no clock rate given on the caps, try to get one with the signal */
    pt = gst_rtp_buffer_get_payload_type (buffer);

    gst_rtp_jitter_buffer_get_clock_rate (jitterbuffer, pt);
    if (priv->clock_rate == -1)
      goto not_negotiated;
  }

  /* take the timestamp of the buffer. This is the time when the packet was
   * received and is used to calculate jitter and clock skew. We will adjust
   * this timestamp with the smoothed value after processing it in the
   * jitterbuffer. */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  /* bring to running time */
  timestamp = gst_segment_to_running_time (&priv->segment, GST_FORMAT_TIME,
      timestamp);

  seqnum = gst_rtp_buffer_get_seq (buffer);
  GST_DEBUG_OBJECT (jitterbuffer,
      "Received packet #%d at time %" GST_TIME_FORMAT, seqnum,
      GST_TIME_ARGS (timestamp));

  JBUF_LOCK_CHECK (priv, out_flushing);
  /* don't accept more data on EOS */
  if (priv->eos)
    goto have_eos;

  /* let's check if this buffer is too late, we can only accept packets with
   * bigger seqnum than the one we last pushed. */
  if (priv->last_popped_seqnum != -1) {
    gint gap;

    gap = gst_rtp_buffer_compare_seqnum (priv->last_popped_seqnum, seqnum);

    if (gap <= 0) {
      /* priv->last_popped_seqnum >= seqnum, this packet is too late or the
       * sender might have been restarted with different seqnum. */
      if (gap < -100) {
        GST_DEBUG_OBJECT (jitterbuffer, "reset: buffer too old %d", gap);
        priv->last_popped_seqnum = -1;
        priv->next_seqnum = -1;
      } else {
        goto too_late;
      }
    } else {
      /* priv->last_popped_seqnum < seqnum, this is a new packet */
      if (gap > 3000) {
        GST_DEBUG_OBJECT (jitterbuffer, "reset: too many dropped packets %d",
            gap);
        priv->last_popped_seqnum = -1;
        priv->next_seqnum = -1;
      }
    }
  }

  /* let's drop oldest packet if the queue is already full and drop-on-latency
   * is set. We can only do this when there actually is a latency. When no
   * latency is set, we just pump it in the queue and let the other end push it
   * out as fast as possible. */
  if (priv->latency_ms && priv->drop_on_latency) {

    latency_ts =
        gst_util_uint64_scale_int (priv->latency_ms, priv->clock_rate, 1000);

    if (rtp_jitter_buffer_get_ts_diff (priv->jbuf) >= latency_ts) {
      GstBuffer *old_buf;

      GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet #%d",
          seqnum);

      old_buf = rtp_jitter_buffer_pop (priv->jbuf);
      gst_buffer_unref (old_buf);
    }
  }

  /* now insert the packet into the queue in sorted order. This function returns
   * FALSE if a packet with the same seqnum was already in the queue, meaning we
   * have a duplicate. */
  if (!rtp_jitter_buffer_insert (priv->jbuf, buffer, timestamp,
          priv->clock_rate, &tail))
    goto duplicate;

  /* signal addition of new buffer when the _loop is waiting. */
  if (priv->waiting)
    JBUF_SIGNAL (priv);

  /* let's unschedule and unblock any waiting buffers. We only want to do this
   * when the tail buffer changed */
  if (priv->clock_id && tail) {
    GST_DEBUG_OBJECT (jitterbuffer,
        "Unscheduling waiting buffer, new tail buffer");
    gst_clock_id_unschedule (priv->clock_id);
  }

  GST_DEBUG_OBJECT (jitterbuffer, "Pushed packet #%d, now %d packets",
      seqnum, rtp_jitter_buffer_num_packets (priv->jbuf));

finished:
  JBUF_UNLOCK (priv);

  gst_object_unref (jitterbuffer);

  return ret;

  /* ERRORS */
invalid_buffer:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
        ("Received invalid RTP payload, dropping"));
    gst_buffer_unref (buffer);
    gst_object_unref (jitterbuffer);
    return GST_FLOW_OK;
  }
not_negotiated:
  {
    GST_WARNING_OBJECT (jitterbuffer, "No clock-rate in caps!");
    gst_buffer_unref (buffer);
    gst_object_unref (jitterbuffer);
    return GST_FLOW_OK;
  }
out_flushing:
  {
    ret = priv->srcresult;
    GST_DEBUG_OBJECT (jitterbuffer, "flushing %s", gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    goto finished;
  }
have_eos:
  {
    ret = GST_FLOW_UNEXPECTED;
    GST_WARNING_OBJECT (jitterbuffer, "we are EOS, refusing buffer");
    gst_buffer_unref (buffer);
    goto finished;
  }
too_late:
  {
    GST_WARNING_OBJECT (jitterbuffer, "Packet #%d too late as #%d was already"
        " popped, dropping", seqnum, priv->last_popped_seqnum);
    priv->num_late++;
    gst_buffer_unref (buffer);
    goto finished;
  }
duplicate:
  {
    GST_WARNING_OBJECT (jitterbuffer, "Duplicate packet #%d detected, dropping",
        seqnum);
    priv->num_duplicates++;
    gst_buffer_unref (buffer);
    goto finished;
  }
}
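For the drop-on-latency branch above, the configured latency in milliseconds is compared against the queued span in RTP clock units: at a clock-rate of 90000, a 200 ms latency is 18000 ticks. A one-function sketch:

#include <gst/gst.h>

/* latency in ms -> RTP clock ticks, as used for the drop-on-latency check */
static guint64
latency_to_rtp_ticks (guint latency_ms, gint clock_rate)
{
  /* 200 ms at 90000 Hz -> 18000 ticks */
  return gst_util_uint64_scale_int (latency_ms, clock_rate, 1000);
}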
Example No. 26
GstFlowReturn
gst_base_video_parse_finish_frame (GstBaseVideoParse * base_video_parse)
{
  GstVideoFrame *frame = base_video_parse->current_frame;
  GstBuffer *buffer;
  GstBaseVideoParseClass *base_video_parse_class;
  GstFlowReturn ret;

  GST_DEBUG ("finish_frame");

  base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  buffer = gst_adapter_take_buffer (base_video_parse->output_adapter,
      gst_adapter_available (base_video_parse->output_adapter));

  if (frame->is_sync_point) {
    base_video_parse->timestamp_offset = base_video_parse->last_timestamp -
        gst_util_uint64_scale (frame->presentation_frame_number,
        base_video_parse->state.fps_d * GST_SECOND,
        base_video_parse->state.fps_n);
    base_video_parse->distance_from_sync = 0;
  }

  frame->distance_from_sync = base_video_parse->distance_from_sync;
  base_video_parse->distance_from_sync++;

  frame->presentation_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number);
  frame->presentation_duration =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->presentation_frame_number + 1) - frame->presentation_timestamp;
  frame->decode_timestamp =
      gst_base_video_parse_get_timestamp (base_video_parse,
      frame->decode_frame_number);

  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
  if (frame->decode_frame_number < 0) {
    GST_BUFFER_OFFSET (buffer) = 0;
  } else {
    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
  }
  GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE;

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  if (frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->src_buffer = buffer;
  ret = base_video_parse_class->shape_output (base_video_parse, frame);

  gst_base_video_parse_free_frame (base_video_parse->current_frame);

  /* create new frame */
  base_video_parse->current_frame =
      gst_base_video_parse_new_frame (base_video_parse);

  return ret;
}
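The timestamp math above converts frame counts to nanoseconds with gst_util_uint64_scale (n, fps_d * GST_SECOND, fps_n): at NTSC 30000/1001, frame 30 lands at exactly 1.001 s. A minimal sketch of both directions, assuming fps_n/fps_d come from the parser state:

#include <gst/gst.h>

/* frame number -> presentation time (ns) */
static GstClockTime
frames_to_time (guint64 n, gint fps_n, gint fps_d)
{
  return gst_util_uint64_scale (n, (guint64) fps_d * GST_SECOND, fps_n);
}

/* presentation time (ns) -> frame number */
static guint64
time_to_frames (GstClockTime t, gint fps_n, gint fps_d)
{
  return gst_util_uint64_scale (t, fps_n, (guint64) fps_d * GST_SECOND);
}

/* frames_to_time (30, 30000, 1001) == 1001000000, i.e. 1.001 s */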
static GstMessage *
update_rms_from_buffer (GstVideoFrameAudioLevel * self, GstBuffer * inbuf)
{
  GstMapInfo map;
  guint8 *in_data;
  gsize in_size;
  gdouble CS;
  guint i;
  guint num_frames, frames;
  guint num_int_samples = 0;    /* number of interleaved samples
                                 * i.e. total count for all channels combined */
  gint channels, rate, bps;
  GValue v = G_VALUE_INIT;
  GValue va = G_VALUE_INIT;
  GValueArray *a;
  GstStructure *s;
  GstMessage *msg;
  GstClockTime duration, running_time;

  channels = GST_AUDIO_INFO_CHANNELS (&self->ainfo);
  bps = GST_AUDIO_INFO_BPS (&self->ainfo);
  rate = GST_AUDIO_INFO_RATE (&self->ainfo);

  gst_buffer_map (inbuf, &map, GST_MAP_READ);
  in_data = map.data;
  in_size = map.size;

  num_int_samples = in_size / bps;

  GST_LOG_OBJECT (self, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
      num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));

  g_return_val_if_fail (num_int_samples % channels == 0, NULL);

  num_frames = num_int_samples / channels;
  frames = num_frames;
  duration = GST_FRAMES_TO_CLOCK_TIME (frames, rate);
  if (num_frames > 0) {
    for (i = 0; i < channels; ++i) {
      self->process (in_data + (bps * i), num_int_samples, channels, &CS);
      GST_LOG_OBJECT (self,
          "[%d]: cumulative squares %lf, over %d samples/%d channels",
          i, CS, num_int_samples, channels);
      self->CS[i] += CS;
    }
    in_data += num_frames * bps;

    self->total_frames += num_frames;
  }
  running_time =
      self->first_time + gst_util_uint64_scale (self->total_frames, GST_SECOND,
      rate);

  a = g_value_array_new (channels);
  s = gst_structure_new ("videoframe-audiolevel", "running-time", G_TYPE_UINT64,
      running_time, "duration", G_TYPE_UINT64, duration, NULL);

  g_value_init (&v, G_TYPE_DOUBLE);
  g_value_init (&va, G_TYPE_VALUE_ARRAY);
  for (i = 0; i < channels; i++) {
    gdouble rms;
    if (frames == 0 || self->CS[i] == 0) {
      rms = 0;                  /* empty buffer */
    } else {
      rms = sqrt (self->CS[i] / frames);
    }
    self->CS[i] = 0.0;
    g_value_set_double (&v, rms);
    g_value_array_append (a, &v);
  }
  g_value_take_boxed (&va, a);
  gst_structure_take_value (s, "rms", &va);
  msg = gst_message_new_element (GST_OBJECT (self), s);

  gst_buffer_unmap (inbuf, &map);

  return msg;
}
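The message above carries one RMS value per channel: the square root of the accumulated squares divided by the number of analyzed frames. As a worked check, four mono samples {0.5, -0.5, 0.5, -0.5} give CS = 1.0 over 4 frames, so RMS = 0.5. A tiny sketch:

#include <math.h>

/* RMS of one channel from its cumulative squares and frame count */
static double
channel_rms (double cumulative_squares, unsigned frames)
{
  if (frames == 0 || cumulative_squares == 0.0)
    return 0.0;                 /* empty buffer */
  return sqrt (cumulative_squares / frames);
}

/* channel_rms (1.0, 4) == 0.5 */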
static GstFlowReturn mfw_gst_vpuenc_chain(GstPad * pad, GstBuffer * buffer)
{
	GstVPU_Enc *vpu_enc = NULL;
	GstFlowReturn retval = GST_FLOW_OK;
	GstCaps *src_caps;
	GstBuffer *outbuffer;
	gint i = 0;
	int ret;
	struct pollfd pollfd;
	unsigned long type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

	GST_DEBUG(__func__);

	vpu_enc = MFW_GST_VPU_ENC(GST_PAD_PARENT(pad));

	if (vpu_enc->init == FALSE) {
		retval = mfw_gst_vpuenc_init_encoder(pad, vpu_enc->memory);
		if (retval != GST_FLOW_OK)
			return retval;
		printf("VPU ENC initialised\n");
	}

	if (!buffer)
		return GST_FLOW_OK;

	i = 0;
	if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
		/* find the V4L2 slot registered for this buffer, or a free one */
		for (i = 0; i < NUM_BUFFERS; i++) {
			if (vpu_enc->buf_v4l2[i].m.userptr == (long int)GST_BUFFER_DATA (buffer))
				break;
		}
		if (i == NUM_BUFFERS) {
			for (i = 0; i < NUM_BUFFERS; i++) {
				if (!vpu_enc->buf_v4l2[i].m.userptr)
					break;
			}
		}
		/* still no slot: all userptr slots are taken by other buffers */
		if (i == NUM_BUFFERS) {
			printf("NO BUFFER AVAILABLE\n");
			gst_buffer_unref(buffer);
			return GST_FLOW_ERROR;
		}
	}

	if (vpu_enc->memory == V4L2_MEMORY_MMAP) {
		/* copy the input Frame into the allocated buffer */
		memcpy(vpu_enc->buf_data[i], GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
		gst_buffer_unref(buffer);
	} else {
		vpu_enc->buf_v4l2[i].m.userptr = (long int)GST_BUFFER_DATA (buffer);
		vpu_enc->buf_v4l2[i].length = GST_BUFFER_SIZE (buffer);
	}

	pollfd.fd = vpu_enc->vpu_fd;
	pollfd.events = POLLIN | POLLOUT;

	ret = ioctl(vpu_enc->vpu_fd, VIDIOC_QBUF, &vpu_enc->buf_v4l2[i]);
	if (ret) {
		if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
			/* fallback to mmap */
			vpu_enc->init = FALSE;
			vpu_enc->memory = V4L2_MEMORY_MMAP;
			GST_WARNING("mfw_gst_vpuenc_chain: fallback to mmap");
			return mfw_gst_vpuenc_chain(pad, buffer);
		}
		GST_ERROR("VIDIOC_QBUF failed: %s\n", strerror(errno));
		return GST_FLOW_ERROR;
	}

	if (!vpu_enc->once) {
		retval = ioctl(vpu_enc->vpu_fd, VIDIOC_STREAMON, &type);
		if (retval) {
			printf("streamon failed with %d", retval);
			return GST_FLOW_ERROR;
		}
		vpu_enc->once = 1;
	}

	ret = ioctl(vpu_enc->vpu_fd, VIDIOC_DQBUF, &vpu_enc->buf_v4l2[0]);
	if (ret) {
		GST_ERROR("VIDIOC_DQBUF failed: %s\n", strerror(errno));
		return GST_FLOW_ERROR;
	}

	if (vpu_enc->memory == V4L2_MEMORY_USERPTR) {
		gst_buffer_unref(buffer);
	}

	src_caps = GST_PAD_CAPS(vpu_enc->srcpad);

	retval = gst_pad_alloc_buffer_and_set_caps(vpu_enc->srcpad,
			0, 1024 * 1024, src_caps, &outbuffer);
	if (retval != GST_FLOW_OK) {
		GST_ERROR("Allocating buffer failed with %d", ret);
		return retval;
	}

	ret = read(vpu_enc->vpu_fd, GST_BUFFER_DATA(outbuffer), 1024 * 1024);
	if (ret < 0) {
		printf("read failed: %s\n", strerror(errno));
		return GST_FLOW_ERROR;
	}
	GST_BUFFER_SIZE(outbuffer) = ret;
	GST_BUFFER_TIMESTAMP(outbuffer) = gst_util_uint64_scale(vpu_enc->encoded_frames,
		1 * GST_SECOND,
		vpu_enc->framerate);

	vpu_enc->encoded_frames++;

	GST_DEBUG_OBJECT(vpu_enc, "frame encoded : %lld ts = %" GST_TIME_FORMAT,
			vpu_enc->encoded_frames,
			GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(outbuffer)));

	retval = gst_pad_push(vpu_enc->srcpad, outbuffer);
	if (retval != GST_FLOW_OK) {
		GST_ERROR("Pushing Output onto the source pad failed with %d \n",
			  retval);
	}

	return retval;
}
Example No. 29
/* Get a DV frame, chop it up in pieces, and push the pieces to the RTP layer.
 */
static GstFlowReturn
gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPDVPay *rtpdvpay;
  guint max_payload_size;
  GstBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_OK;
  gint hdrlen;
  gsize size;
  GstMapInfo map;
  guint8 *data;
  guint8 *dest;
  guint filled;
  GstRTPBuffer rtp = { NULL, };

  rtpdvpay = GST_RTP_DV_PAY (basepayload);

  hdrlen = gst_rtp_buffer_calc_header_len (0);
  /* DV frames are made up from a bunch of DIF blocks. DIF blocks are 80 bytes
   * each, and we should put an integral number of them in each RTP packet.
   * Therefore, we round the available room down to the nearest multiple of 80.
   *
   * The available room is just the packet MTU, minus the RTP header length. */
  max_payload_size = ((GST_RTP_BASE_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;

  /* The length of the buffer to transmit. */
  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
        (NULL), ("Failed to map buffer"));
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }
  data = map.data;
  size = map.size;

  GST_DEBUG_OBJECT (rtpdvpay,
      "DV RTP payloader got buffer of %" G_GSIZE_FORMAT
      " bytes, splitting in %u byte " "payload fragments, at time %"
      GST_TIME_FORMAT, size, max_payload_size,
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));

  if (!rtpdvpay->negotiated) {
    gst_dv_pay_negotiate (rtpdvpay, data, size);
    /* if we have not yet scanned the stream for its type, do so now */
    rtpdvpay->negotiated = TRUE;
  }

  outbuf = NULL;
  dest = NULL;
  filled = 0;

  /* while we have a complete DIF chunks left */
  while (size >= 80) {
    /* Allocate a new buffer, set the timestamp */
    if (outbuf == NULL) {
      outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0);
      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buffer);

      if (!gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp)) {
        gst_buffer_unref (outbuf);
        GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
            (NULL), ("Failed to map RTP buffer"));
        ret = GST_FLOW_ERROR;
        goto beach;
      }
      dest = gst_rtp_buffer_get_payload (&rtp);
      filled = 0;
    }

    /* inspect the DIF chunk, if we don't need to include it, skip to the next one. */
    if (include_dif (rtpdvpay, data)) {
      /* copy data in packet */
      memcpy (dest, data, 80);

      dest += 80;
      filled += 80;
    }

    /* go to next dif chunk */
    size -= 80;
    data += 80;

    /* push out the buffer if the next one would exceed the max packet size or
     * when we are pushing the last packet */
    if (filled + 80 > max_payload_size || size < 80) {
      if (size < 160) {
        guint hlen;

        /* set marker */
        gst_rtp_buffer_set_marker (&rtp, TRUE);

        /* shrink buffer to last packet */
        hlen = gst_rtp_buffer_get_header_len (&rtp);
        gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
      }

      /* Push out the created piece, and check for errors. */
      gst_rtp_buffer_unmap (&rtp);
      ret = gst_rtp_base_payload_push (basepayload, outbuf);
      if (ret != GST_FLOW_OK)
        break;

      outbuf = NULL;
    }
  }

beach:
  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);

  return ret;
}
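The payload sizing above rounds the room left after the RTP header down to a whole number of 80-byte DIF blocks: with an MTU of 1400 and the 12-byte CSRC-less header, 1388 bytes round down to 1360, i.e. 17 DIF blocks per packet. A sketch of the computation (dv_max_payload is a hypothetical helper):

/* usable RTP payload, rounded down to whole 80-byte DIF blocks */
static unsigned
dv_max_payload (unsigned mtu, unsigned rtp_header_len)
{
  return ((mtu - rtp_header_len) / 80) * 80;
}

/* dv_max_payload (1400, 12) == 1360, i.e. 17 DIF blocks */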
static GstFlowReturn
gst_base_rtp_audio_payload_push_buffer (GstBaseRTPAudioPayload *
    baseaudiopayload, GstBuffer * buffer)
{
  GstBaseRTPPayload *basepayload;
  GstBaseRTPAudioPayloadPrivate *priv;
  GstBuffer *outbuf;
  GstClockTime timestamp;
  guint8 *payload;
  guint payload_len;
  GstFlowReturn ret;

  priv = baseaudiopayload->priv;
  basepayload = GST_BASE_RTP_PAYLOAD (baseaudiopayload);

  payload_len = GST_BUFFER_SIZE (buffer);
  timestamp = GST_BUFFER_TIMESTAMP (buffer);

  GST_DEBUG_OBJECT (baseaudiopayload, "Pushing %d bytes ts %" GST_TIME_FORMAT,
      payload_len, GST_TIME_ARGS (timestamp));

  if (priv->buffer_list) {
    /* create just the RTP header buffer */
    outbuf = gst_rtp_buffer_new_allocate (0, 0, 0);
  } else {
    /* create buffer to hold the payload */
    outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
  }

  /* set metadata */
  gst_base_rtp_audio_payload_set_meta (baseaudiopayload, outbuf, payload_len,
      timestamp);

  if (priv->buffer_list) {
    GstBufferList *list;
    GstBufferListIterator *it;

    list = gst_buffer_list_new ();
    it = gst_buffer_list_iterate (list);

    /* add both buffers to the buffer list */
    gst_buffer_list_iterator_add_group (it);
    gst_buffer_list_iterator_add (it, outbuf);
    gst_buffer_list_iterator_add (it, buffer);

    gst_buffer_list_iterator_free (it);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing list %p", list);
    ret = gst_basertppayload_push_list (basepayload, list);
  } else {
    /* copy payload */
    payload = gst_rtp_buffer_get_payload (outbuf);
    memcpy (payload, GST_BUFFER_DATA (buffer), payload_len);
    gst_buffer_unref (buffer);

    GST_DEBUG_OBJECT (baseaudiopayload, "Pushing buffer %p", outbuf);
    ret = gst_basertppayload_push (basepayload, outbuf);
  }

  return ret;
}