Example #1
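From the ONVIF RTP parser: the C (clean point) and D (discontinuity) bits of the ONVIF header extension are mapped onto GST_BUFFER_FLAG_DELTA_UNIT and GST_BUFFER_FLAG_DISCONT.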
static gboolean
handle_buffer (GstRtpOnvifParse * self, GstBuffer * buf)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  guint8 *data;
  guint16 bits;
  guint wordlen;
  guint8 flags;
  /*
     guint64 timestamp;
     guint8 cseq;
   */

  if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp)) {
    GST_ELEMENT_ERROR (self, STREAM, FAILED,
        ("Failed to map RTP buffer"), (NULL));
    return FALSE;
  }

  /* Check if the ONVIF RTP extension is present in the packet */
  if (!gst_rtp_buffer_get_extension_data (&rtp, &bits, (gpointer) & data,
          &wordlen))
    goto out;

  if (bits != EXTENSION_ID || wordlen != EXTENSION_SIZE)
    goto out;

  /* timestamp = GST_READ_UINT64_BE (data);  TODO */
  flags = GST_READ_UINT8 (data + 8);
  /* cseq = GST_READ_UINT8 (data + 9);  TODO */

  /* C */
  if (flags & (1 << 7))
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);

  /* E */
  /* if (flags & (1 << 6));  TODO */

  /* D */
  if (flags & (1 << 5))
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  else
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);

out:
  gst_rtp_buffer_unmap (&rtp);
  return TRUE;
}
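A minimal consumer-side sketch (an assumption, not part of the example above: probe_cb is a hypothetical callback for a GStreamer 1.x pipeline) showing how the flags toggled here can be read back with a pad probe:

static GstPadProbeReturn
probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);

  /* DELTA_UNIT cleared means the buffer starts a decodable unit (keyframe) */
  if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT))
    g_print ("keyframe\n");
  /* DISCONT marks a break in the data flow */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
    g_print ("discontinuity\n");

  return GST_PAD_PROBE_OK;
}
/* attach with:
 * gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, probe_cb, NULL, NULL); */
Example #2
From the MPEG video parser: GST_BUFFER_FLAG_DELTA_UNIT is cleared for I-pictures and set for everything else; buffers carrying no picture data are flagged NO_FRAME and forced to zero duration.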
static GstFlowReturn
gst_mpegv_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstMpegvParse *mpvparse = GST_MPEGVIDEO_PARSE (parse);
  GstBuffer *buffer = frame->buffer;

  gst_mpegv_parse_update_src_caps (mpvparse);

  if (G_UNLIKELY (mpvparse->pichdr.pic_type == GST_MPEG_VIDEO_PICTURE_TYPE_I))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* maybe only sequence in this buffer, though not recommended,
   * so mark it as such and force 0 duration */
  if (G_UNLIKELY (mpvparse->pic_offset < 0)) {
    GST_DEBUG_OBJECT (mpvparse, "frame holds no picture data");
    frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
    GST_BUFFER_DURATION (buffer) = 0;
  }

  if (mpvparse->frame_repeat_count
      && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
    GST_BUFFER_DURATION (buffer) =
        (1 + mpvparse->frame_repeat_count) * GST_BUFFER_DURATION (buffer) / 2;
  }

  if (G_UNLIKELY (mpvparse->drop && !mpvparse->config)) {
    GST_DEBUG_OBJECT (mpvparse, "dropping frame as no config yet");
    return GST_BASE_PARSE_FLOW_DROPPED;
  } else
    return GST_FLOW_OK;
}
Example #3
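From the VideoToolbox encoder callback: once a forced keyframe arrives the force-keyframe option is removed; the resulting buffer gets DISCONT set on keyframes, and DISCONT unset plus DELTA_UNIT set otherwise.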
static VTStatus
gst_vtenc_enqueue_buffer (void *data, int a2, int a3, int a4,
    CMSampleBufferRef sbuf, int a6, int a7)
{
  GstVTEnc *self = data;
  gboolean is_keyframe;
  GstBuffer *buf;

  /* This may happen if we don't have enough bitrate */
  if (sbuf == NULL)
    goto beach;

  is_keyframe = gst_vtenc_buffer_is_keyframe (self, sbuf);
  if (self->expect_keyframe) {
    if (!is_keyframe)
      goto beach;
    CFDictionaryRemoveValue (self->options,
        *(self->ctx->vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  }
  self->expect_keyframe = FALSE;

  buf = gst_core_media_buffer_new (self->ctx, sbuf);
  gst_buffer_copy_metadata (buf, self->cur_inbuf, GST_BUFFER_COPY_TIMESTAMPS);
  if (is_keyframe) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  g_ptr_array_add (self->cur_outbufs, buf);

beach:
  return kVTSuccess;
}
Example #4
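From the H.264 parser (0.10): keyframes get DELTA_UNIT unset, and the buffer is swapped for a transformed AVC version when one is pending in the adapter.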
static GstFlowReturn
gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstH264Parse *h264parse;
  GstBuffer *buffer;
  guint av;

  h264parse = GST_H264_PARSE (parse);
  buffer = frame->buffer;

  gst_h264_parse_update_src_caps (h264parse);

  gst_h264_params_get_timestamp (h264parse->params,
      &GST_BUFFER_TIMESTAMP (buffer), &GST_BUFFER_DURATION (buffer),
      h264parse->frame_start);

  if (h264parse->keyframe)
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  /* replace with transformed AVC output if applicable */
  av = gst_adapter_available (h264parse->frame_out);
  if (av) {
    GstBuffer *buf;

    buf = gst_adapter_take_buffer (h264parse->frame_out, av);
    gst_buffer_copy_metadata (buf, buffer, GST_BUFFER_COPY_ALL);
    gst_buffer_replace (&frame->buffer, buf);
  }

  return GST_FLOW_OK;
}
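Example #5
From the WebKit MSE playback pipeline: GST_BUFFER_FLAG_DECODE_ONLY is unset on the sample's buffer before it is pushed into the stream's appsrc.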
void PlaybackPipeline::enqueueSample(PassRefPtr<MediaSample> prsample)
{
    RefPtr<MediaSample> rsample = prsample;
    AtomicString trackId = rsample->trackID();

    TRACE_MEDIA_MESSAGE("enqueuing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT, trackId.string().utf8().data(), rsample->presentationTime().toFloat(), rsample->presentationSize().width(), rsample->presentationSize().height(), GST_TIME_ARGS(floatToGstClockTime(rsample->presentationTime().toDouble())));

    ASSERT(WTF::isMainThread());

    GST_OBJECT_LOCK(m_webKitMediaSrc.get());
    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        WARN_MEDIA_MESSAGE("No stream!");
        GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    GST_OBJECT_UNLOCK(m_webKitMediaSrc.get());

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(rsample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GstSample* gstsample = gst_sample_ref(sample->sample());
        GST_BUFFER_FLAG_UNSET(gst_sample_get_buffer(gstsample), GST_BUFFER_FLAG_DECODE_ONLY);
        push_sample(GST_APP_SRC(appsrc), gstsample);
        // gst_app_src_push_sample() uses transfer-none for gstsample
        gst_sample_unref(gstsample);
    }
}
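Example #6
From the GstBufferPool core: a freshly allocated buffer has its metadata marked as pooled and GST_BUFFER_FLAG_TAG_MEMORY unset, the state in which the pool expects the buffer to be released again.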
static GstFlowReturn
do_alloc_buffer (GstBufferPool * pool, GstBuffer ** buffer,
    GstBufferPoolAcquireParams * params)
{
  GstBufferPoolPrivate *priv = pool->priv;
  GstFlowReturn result;
  gint cur_buffers, max_buffers;
  GstBufferPoolClass *pclass;

  pclass = GST_BUFFER_POOL_GET_CLASS (pool);

  if (G_UNLIKELY (!pclass->alloc_buffer))
    goto no_function;

  max_buffers = priv->max_buffers;

  /* increment the allocation counter */
  cur_buffers = g_atomic_int_add (&priv->cur_buffers, 1);
  if (max_buffers && cur_buffers >= max_buffers)
    goto max_reached;

  result = pclass->alloc_buffer (pool, buffer, params);
  if (G_UNLIKELY (result != GST_FLOW_OK))
    goto alloc_failed;

  /* lock all metadata and mark as pooled, we want this to remain on
   * the buffer and we want to remove any other metadata that gets added
   * later */
  gst_buffer_foreach_meta (*buffer, mark_meta_pooled, pool);

  /* un-tag memory, this is how we expect the buffer when it is
   * released again */
  GST_BUFFER_FLAG_UNSET (*buffer, GST_BUFFER_FLAG_TAG_MEMORY);

  GST_LOG_OBJECT (pool, "allocated buffer %d/%d, %p", cur_buffers,
      max_buffers, *buffer);

  return result;

  /* ERRORS */
no_function:
  {
    GST_ERROR_OBJECT (pool, "no alloc function");
    return GST_FLOW_NOT_SUPPORTED;
  }
max_reached:
  {
    GST_DEBUG_OBJECT (pool, "max buffers reached");
    g_atomic_int_add (&priv->cur_buffers, -1);
    return GST_FLOW_EOS;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (pool, "alloc function failed");
    g_atomic_int_add (&priv->cur_buffers, -1);
    return result;
  }
}
Example #7
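From intersubsrc (1.0): the shared surface's subtitle buffer, or a one-byte dummy, is returned; DISCONT is unset by default and set only on the very first buffer.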
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
                          GstBuffer ** buf)
{
    GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
    GstBuffer *buffer;

    GST_DEBUG_OBJECT (intersubsrc, "create");

    buffer = NULL;

    g_mutex_lock (&intersubsrc->surface->mutex);
    if (intersubsrc->surface->sub_buffer) {
        buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
        //intersubsrc->surface->sub_buffer_count++;
        //if (intersubsrc->surface->sub_buffer_count >= 30) {
        gst_buffer_unref (intersubsrc->surface->sub_buffer);
        intersubsrc->surface->sub_buffer = NULL;
        //}
    }
    g_mutex_unlock (&intersubsrc->surface->mutex);

    if (buffer == NULL) {
        GstMapInfo map;

        buffer = gst_buffer_new_and_alloc (1);

        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
        map.data[0] = 0;
        gst_buffer_unmap (buffer, &map);
    }

    buffer = gst_buffer_make_writable (buffer);

    /* FIXME: does this make sense? Rate is always 0 */
#if 0
    GST_BUFFER_TIMESTAMP (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
                                   intersubsrc->rate);
    GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
                      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
    GST_BUFFER_DURATION (buffer) =
        gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
                                   intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
#endif
    GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
    GST_BUFFER_OFFSET_END (buffer) = -1;
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    if (intersubsrc->n_frames == 0) {
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    }
    intersubsrc->n_frames++;

    *buf = buffer;

    return GST_FLOW_OK;
}
Example #8
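From the raw video parser: extra padding is trimmed with a sub-buffer copy, interlacing flags (INTERLACED, TFF) are set or unset from the configuration, and any existing GstVideoMeta is replaced.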
static gboolean
gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config, GstBuffer * in_data,
    G_GNUC_UNUSED gsize total_num_in_bytes,
    G_GNUC_UNUSED gsize num_valid_in_bytes, GstBuffer ** processed_data)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
  guint frame_flags = 0;
  GstVideoInfo *video_info = &(config_ptr->info);
  GstVideoMeta *videometa;
  GstBuffer *out_data;

  /* In case of extra padding bytes, get a subbuffer without the padding bytes.
   * Otherwise, just add the video meta. */
  if (GST_VIDEO_INFO_SIZE (video_info) < config_ptr->frame_stride) {
    *processed_data = out_data =
        gst_buffer_copy_region (in_data,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
        GST_BUFFER_COPY_MEMORY, 0, GST_VIDEO_INFO_SIZE (video_info));
  } else {
    out_data = in_data;
    *processed_data = NULL;
  }

  if (config_ptr->interlaced) {
    GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    frame_flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;

    if (config_ptr->top_field_first) {
      GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
      frame_flags |= GST_VIDEO_FRAME_FLAG_TFF;
    } else
      GST_BUFFER_FLAG_UNSET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
  }

  /* Remove any existing videometa - it will be replaced by the new videometa
   * from here */
  while ((videometa = gst_buffer_get_video_meta (out_data))) {
    GST_LOG_OBJECT (raw_base_parse, "removing existing videometa from buffer");
    gst_buffer_remove_meta (out_data, (GstMeta *) videometa);
  }

  gst_buffer_add_video_meta_full (out_data,
      frame_flags,
      config_ptr->format,
      config_ptr->width,
      config_ptr->height,
      GST_VIDEO_INFO_N_PLANES (video_info),
      config_ptr->plane_offsets, config_ptr->plane_strides);


  return TRUE;
}
Example #9
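From the 0.10 v4l2 buffer pool: a finalized buffer is requeued or revived onto the available queue; a revived buffer gets its size zeroed and DISCONT unset.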
static void
gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
{
  GstV4l2BufferPool *pool;
  gboolean resuscitated = FALSE;
  gint index;

  pool = buffer->pool;

  index = buffer->vbuffer.index;

  GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);

  GST_V4L2_BUFFER_POOL_LOCK (pool);
  if (pool->running) {
    if (pool->requeuebuf) {
      if (!gst_v4l2_buffer_pool_qbuf (pool, buffer)) {
        GST_WARNING ("could not requeue buffer %p %d", buffer, index);
      } else {
        resuscitated = TRUE;
      }
    } else {
      resuscitated = TRUE;
      /* XXX double check this... I think it is ok to not synchronize this
       * w.r.t. destruction of the pool, since the buffer is still live and
       * the buffer holds a ref to the pool..
       */
      g_async_queue_push (pool->avail_buffers, buffer);
    }
  } else {
    GST_LOG_OBJECT (pool->v4l2elem, "the pool is shutting down");
  }

  if (resuscitated) {
    /* FIXME: check that the caps didn't change */
    GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
    gst_buffer_ref (GST_BUFFER (buffer));
    GST_BUFFER_SIZE (buffer) = 0;
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
    pool->buffers[index] = buffer;
  }

  GST_V4L2_BUFFER_POOL_UNLOCK (pool);

  if (!resuscitated) {
    GST_LOG_OBJECT (pool->v4l2elem,
        "buffer %p (data %p, len %u) not recovered, unmapping",
        buffer, GST_BUFFER_DATA (buffer), buffer->mmap_length);
    gst_mini_object_unref (GST_MINI_OBJECT (pool));
    v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->mmap_length);

    GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
        (buffer));
  }
}
Example #10
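The 0.10 counterpart of Example #7, using GST_BUFFER_DATA, gst_buffer_make_metadata_writable() and gst_buffer_set_caps().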
static GstFlowReturn
gst_inter_sub_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterSubSrc *intersubsrc = GST_INTER_SUB_SRC (src);
  GstBuffer *buffer;

  GST_DEBUG_OBJECT (intersubsrc, "create");

  buffer = NULL;

  g_mutex_lock (intersubsrc->surface->mutex);
  if (intersubsrc->surface->sub_buffer) {
    buffer = gst_buffer_ref (intersubsrc->surface->sub_buffer);
    //intersubsrc->surface->sub_buffer_count++;
    //if (intersubsrc->surface->sub_buffer_count >= 30) {
    gst_buffer_unref (intersubsrc->surface->sub_buffer);
    intersubsrc->surface->sub_buffer = NULL;
    //}
  }
  g_mutex_unlock (intersubsrc->surface->mutex);

  if (buffer == NULL) {
    guint8 *data;

    buffer = gst_buffer_new_and_alloc (1);

    data = GST_BUFFER_DATA (buffer);
    data[0] = 0;
  }

  buffer = gst_buffer_make_metadata_writable (buffer);

  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, intersubsrc->n_frames,
      intersubsrc->rate);
  GST_DEBUG_OBJECT (intersubsrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (GST_SECOND, (intersubsrc->n_frames + 1),
      intersubsrc->rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = intersubsrc->n_frames;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (intersubsrc->n_frames == 0) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (intersubsrc)));
  intersubsrc->n_frames++;

  *buf = buffer;

  return GST_FLOW_OK;
}
Example #11
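From videoparse: for interlaced content, GST_VIDEO_BUFFER_FLAG_TFF is set or unset according to the configured field order.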
static void
gst_video_parse_set_buffer_flags (GstRawParse * rp, GstBuffer * buffer)
{
  GstVideoParse *vp = GST_VIDEO_PARSE (rp);

  if (vp->interlaced) {
    if (vp->top_field_first) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    } else {
      GST_BUFFER_FLAG_UNSET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
  }
}
Example #12
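From GstAudioAggregator: GAP input buffers are skipped entirely; once real samples have been mixed in, the output buffer's GAP flag is unset.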
static gboolean
gst_audio_aggregator_mix_buffer (GstAudioAggregator * aagg,
    GstAudioAggregatorPad * pad, GstBuffer * inbuf, GstBuffer * outbuf)
{
  guint overlap;
  guint out_start;
  gboolean filled;
  guint blocksize;

  blocksize = gst_util_uint64_scale (aagg->priv->output_buffer_duration,
      GST_AUDIO_INFO_RATE (&aagg->info), GST_SECOND);
  blocksize = MAX (1, blocksize);

  /* Overlap => mix */
  if (aagg->priv->offset < pad->priv->output_offset)
    out_start = pad->priv->output_offset - aagg->priv->offset;
  else
    out_start = 0;

  overlap = pad->priv->size - pad->priv->position;
  if (overlap > blocksize - out_start)
    overlap = blocksize - out_start;

  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP)) {
    /* skip gap buffer */
    GST_LOG_OBJECT (pad, "skipping GAP buffer");
    pad->priv->output_offset += pad->priv->size - pad->priv->position;
    pad->priv->position = pad->priv->size;

    gst_buffer_replace (&pad->priv->buffer, NULL);
    return FALSE;
  }

  filled = GST_AUDIO_AGGREGATOR_GET_CLASS (aagg)->aggregate_one_buffer (aagg,
      pad, inbuf, pad->priv->position, outbuf, out_start, overlap);

  if (filled)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);

  pad->priv->position += overlap;
  pad->priv->output_offset += overlap;

  if (pad->priv->position == pad->priv->size) {
    /* Buffer done, drop it */
    gst_buffer_replace (&pad->priv->buffer, NULL);
    GST_DEBUG_OBJECT (pad, "Finished mixing buffer, waiting for next");
    return FALSE;
  }

  return TRUE;
}
Example #13
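From gst-imx: only DTS/PTS are copied, and GST_BUFFER_FLAG_TAG_MEMORY is stripped from the copied flags so the output buffer is not reallocated on every frame.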
static gboolean gst_imx_blitter_video_transform_copy_metadata(G_GNUC_UNUSED GstBaseTransform *trans, GstBuffer *input, GstBuffer *outbuf)
{
	/* Only copy timestamps; the rest of the metadata must not be copied */
	GST_BUFFER_DTS(outbuf) = GST_BUFFER_DTS(input);
	GST_BUFFER_PTS(outbuf) = GST_BUFFER_PTS(input);

	/* For GStreamer 1.3.1 and newer, make sure the GST_BUFFER_FLAG_TAG_MEMORY flag
	 * isn't copied, otherwise the output buffer will be reallocated all the time */
	GST_BUFFER_FLAGS(outbuf) = GST_BUFFER_FLAGS(input);
#if GST_CHECK_VERSION(1, 3, 1)
	GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLAG_TAG_MEMORY);
#endif

	return TRUE;
}
Example #14
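From the gst-droid buffer pool: resetting a buffer drops all of its memory along with TAG_MEMORY, then signals a waiting allocator that a slot is free.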
static void
gst_droid_buffer_pool_reset_buffer (GstBufferPool * pool, GstBuffer * buffer)
{
  GstDroidBufferPool *dpool = GST_DROID_BUFFER_POOL (pool);

  gst_buffer_remove_all_memory (buffer);
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);

  g_mutex_lock (&dpool->lock);
  ++dpool->num_buffers;
  GST_DEBUG_OBJECT (dpool, "num buffers: %d", dpool->num_buffers);
  g_cond_signal (&dpool->cond);
  g_mutex_unlock (&dpool->lock);

  /* the function and the parent vmethod both return void, so no value to
   * propagate */
  GST_BUFFER_POOL_CLASS (parent_class)->reset_buffer (pool, buffer);
}
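Example #15
From a test-suite deserializer: a FrameNode is rebuilt from name/value attribute pairs, and its buffer's DELTA_UNIT flag mirrors the is-keyframe attribute.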
static FrameNode *
deserialize_framenode (const gchar ** names, const gchar ** values)
{
  gint i;

  FrameNode *framenode = g_slice_new0 (FrameNode);

  for (i = 0; names[i] != NULL; i++) {
    if (g_strcmp0 (names[i], "id") == 0)
      framenode->id = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "offset") == 0)
      framenode->offset = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "offset-end") == 0)
      framenode->offset_end = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "duration") == 0)
      framenode->duration = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "pts") == 0)
      framenode->pts = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "dts") == 0)
      framenode->dts = g_ascii_strtoull (values[i], NULL, 0);
    else if (g_strcmp0 (names[i], "checksum") == 0)
      framenode->checksum = g_strdup (values[i]);
    else if (g_strcmp0 (names[i], "is-keyframe") == 0) {
      if (!g_ascii_strcasecmp (values[i], "true"))
        framenode->is_keyframe = TRUE;
      else
        framenode->is_keyframe = FALSE;
    }
  }

  framenode->buf = gst_buffer_new_wrapped (framenode->checksum,
      strlen (framenode->checksum) + 1);

  GST_BUFFER_OFFSET (framenode->buf) = framenode->offset;
  GST_BUFFER_OFFSET_END (framenode->buf) = framenode->offset_end;
  GST_BUFFER_DURATION (framenode->buf) = framenode->duration;
  GST_BUFFER_PTS (framenode->buf) = framenode->pts;
  GST_BUFFER_DTS (framenode->buf) = framenode->dts;

  if (framenode->is_keyframe) {
    GST_BUFFER_FLAG_UNSET (framenode->buf, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (framenode->buf, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  return framenode;
}
Example #16
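From the Theora encoder (0.10): granulepos bookkeeping, a pending-discont flag, and the keyframe bit of the packet's first byte drive DISCONT and DELTA_UNIT.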
/* prepare a buffer for transmission by passing data through libtheora */
static GstFlowReturn
theora_buffer_from_packet (GstTheoraEnc * enc, ogg_packet * packet,
    GstClockTime timestamp, GstClockTime running_time,
    GstClockTime duration, GstBuffer ** buffer)
{
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;

  buf = gst_buffer_new_and_alloc (packet->bytes);
  if (!buf) {
    GST_WARNING_OBJECT (enc, "Could not allocate buffer");
    ret = GST_FLOW_ERROR;
    goto done;
  }

  memcpy (GST_BUFFER_DATA (buf), packet->packet, packet->bytes);
  gst_buffer_set_caps (buf, GST_PAD_CAPS (enc->srcpad));
  /* see ext/ogg/README; OFFSET_END takes "our" granulepos, OFFSET its
   * time representation */
  GST_BUFFER_OFFSET_END (buf) =
      granulepos_add (packet->granulepos, enc->granulepos_offset,
      enc->granule_shift);
  GST_BUFFER_OFFSET (buf) = granulepos_to_timestamp (enc,
      GST_BUFFER_OFFSET_END (buf));

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  if (enc->next_discont) {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    enc->next_discont = FALSE;
  }

  /* the second most significant bit of the first data byte is cleared
   * for keyframes */
  if ((packet->packet[0] & 0x40) == 0) {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
  }
  enc->packetno++;

done:
  *buffer = buf;
  return ret;
}
Example #17
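A unit-test helper: builds a buffer from a BufferDesc, copying DTS, PTS and duration, and toggling DELTA_UNIT on the keyframe field.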
static GstBuffer *
_create_buffer (BufferDesc * bdesc)
{
  gchar *tmp = g_strdup (bdesc->content);
  GstBuffer *buffer =
      gst_buffer_new_wrapped (tmp, strlen (tmp) * sizeof (gchar));

  GST_BUFFER_DTS (buffer) = bdesc->dts;
  GST_BUFFER_PTS (buffer) = bdesc->pts;
  GST_BUFFER_DURATION (buffer) = bdesc->duration;

  if (bdesc->keyframe)
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  return buffer;
}
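A hedged usage sketch (assuming BufferDesc is a plain struct exposing exactly the fields the helper reads):

  BufferDesc desc = { .content = "frame-0", .dts = 0, .pts = 0,
      .duration = GST_MSECOND, .keyframe = TRUE };
  GstBuffer *buf = _create_buffer (&desc);
  /* keyframe => DELTA_UNIT must be cleared */
  g_assert (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT));
  gst_buffer_unref (buf);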
Example #18
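A fuller gst-imx variant of Example #13 that also copies duration and offsets, still unsetting TAG_MEMORY on GStreamer 1.3.1 and newer.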
static gboolean gst_imx_blitter_video_transform_copy_metadata(G_GNUC_UNUSED GstBaseTransform *trans, GstBuffer *input, GstBuffer *outbuf)
{
	/* Copy PTS, DTS, duration, offset, offset-end
	 * These do not change in the videotransform operation */
	GST_BUFFER_DTS(outbuf) = GST_BUFFER_DTS(input);
	GST_BUFFER_PTS(outbuf) = GST_BUFFER_PTS(input);
	GST_BUFFER_DURATION(outbuf) = GST_BUFFER_DURATION(input);
	GST_BUFFER_OFFSET(outbuf) = GST_BUFFER_OFFSET(input);
	GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET_END(input);

	/* For GStreamer 1.3.1 and newer, make sure the GST_BUFFER_FLAG_TAG_MEMORY flag
	 * isn't copied, otherwise the output buffer will be reallocated all the time */
	GST_BUFFER_FLAGS(outbuf) = GST_BUFFER_FLAGS(input);
#if GST_CHECK_VERSION(1, 3, 1)
	GST_BUFFER_FLAG_UNSET(outbuf, GST_BUFFER_FLAG_TAG_MEMORY);
#endif

	return TRUE;
}
Example #19
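The newer WebKit variant of Example #5: the sample is only pushed while the SourceBuffer is ready for more data, and DECODE_ONLY is unset on the buffer before pushSample().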
void PlaybackPipeline::enqueueSample(RefPtr<MediaSample> mediaSample)
{
    ASSERT(WTF::isMainThread());

    AtomicString trackId = mediaSample->trackID();

    GST_TRACE("enqueuing sample trackId=%s PTS=%f presentationSize=%.0fx%.0f at %" GST_TIME_FORMAT " duration: %" GST_TIME_FORMAT,
        trackId.string().utf8().data(), mediaSample->presentationTime().toFloat(),
        mediaSample->presentationSize().width(), mediaSample->presentationSize().height(),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->presentationTime().toDouble())),
        GST_TIME_ARGS(WebCore::toGstClockTime(mediaSample->duration().toDouble())));

    Stream* stream = getStreamByTrackId(m_webKitMediaSrc.get(), trackId);

    if (!stream) {
        GST_WARNING("No stream!");
        return;
    }

    if (!stream->sourceBuffer->isReadyForMoreSamples(trackId)) {
        GST_DEBUG("enqueueSample: skip adding new sample for trackId=%s, SB is not ready yet", trackId.string().utf8().data());
        return;
    }

    GstElement* appsrc = stream->appsrc;
    MediaTime lastEnqueuedTime = stream->lastEnqueuedTime;

    GStreamerMediaSample* sample = static_cast<GStreamerMediaSample*>(mediaSample.get());
    if (sample->sample() && gst_sample_get_buffer(sample->sample())) {
        GRefPtr<GstSample> gstSample = sample->sample();
        GstBuffer* buffer = gst_sample_get_buffer(gstSample.get());
        lastEnqueuedTime = sample->presentationTime();

        GST_BUFFER_FLAG_UNSET(buffer, GST_BUFFER_FLAG_DECODE_ONLY);
        pushSample(GST_APP_SRC(appsrc), gstSample.get());
        // gst_app_src_push_sample() uses transfer-none for gstSample.

        stream->lastEnqueuedTime = lastEnqueuedTime;
    }
}
Example #20
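From the old base video parser: caps are negotiated lazily on first push, and a pending discont is turned into GST_BUFFER_FLAG_DISCONT exactly once.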
GstFlowReturn
gst_base_video_parse_push (GstBaseVideoParse * base_video_parse,
    GstBuffer * buffer)
{
  GstBaseVideoParseClass *base_video_parse_class;

  base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);

  if (base_video_parse->caps == NULL) {
    gboolean ret;

    base_video_parse->caps =
        base_video_parse_class->get_caps (base_video_parse);

    ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
        base_video_parse->caps);

    if (!ret) {
      GST_WARNING ("pad didn't accept caps");
      return GST_FLOW_ERROR;
    }
  }
  gst_buffer_set_caps (buffer, base_video_parse->caps);

  GST_DEBUG ("pushing ts=%" GST_TIME_FORMAT " dur=%" GST_TIME_FORMAT
      " off=%" G_GUINT64_FORMAT " off_end=%" G_GUINT64_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)),
      GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer));

  if (base_video_parse->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_parse->discont = FALSE;
  } else {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  }

  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), buffer);
}
Example #21
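From the 0.10 v4l2 buffer pool: a buffer popped from the available queue has its size restored and every flag cleared by unsetting with an all-ones mask.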
/**
 * gst_v4l2_buffer_pool_get:
 * @pool:   the "this" object
 * @blocking:  should this call suspend until there is a buffer available
 *    in the buffer pool?
 *
 * Get an available buffer in the pool
 */
GstV4l2Buffer *
gst_v4l2_buffer_pool_get (GstV4l2BufferPool * pool, gboolean blocking)
{
  GstV4l2Buffer *buf;

  if (blocking) {
    buf = g_async_queue_pop (pool->avail_buffers);
  } else {
    buf = g_async_queue_try_pop (pool->avail_buffers);
  }

  if (buf) {
    GST_V4L2_BUFFER_POOL_LOCK (pool);
    GST_BUFFER_SIZE (buf) = buf->vbuffer.length;
    GST_BUFFER_FLAG_UNSET (buf, 0xffffffff);
    GST_V4L2_BUFFER_POOL_UNLOCK (pool);
  }

  pool->running = TRUE;

  return buf;
}
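The 0xffffffff trick works because GST_BUFFER_FLAG_UNSET just clears bits in the mini-object flags word, i.e. roughly:

  GST_MINI_OBJECT_FLAGS (buf) &= ~0xffffffff;   /* all flags cleared */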
Example #22
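From the H.263 parser (0.10): if the header cannot be parsed the element falls back to passthrough; otherwise DELTA_UNIT is toggled from the parsed picture type.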
static GstFlowReturn
gst_h263_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstH263Parse *h263parse;
  GstBuffer *buffer;
  GstFlowReturn res;
  H263Params params = { 0, };

  h263parse = GST_H263_PARSE (parse);
  buffer = frame->buffer;

  res = gst_h263_parse_get_params (&params, buffer, TRUE, &h263parse->state);
  if (res != GST_FLOW_OK)
    goto out;

  if (h263parse->state == PASSTHROUGH || h263parse->state == PARSING) {
    /* There's a feature we don't support, or we didn't have enough data to
     * parse the header, which should not be possible. Either way, go into
     * passthrough mode and let downstream handle it if it can. */
    GST_WARNING ("Couldn't parse header - setting passthrough mode");
    gst_base_parse_set_passthrough (parse, TRUE);
    goto out;
  }

  /* h263parse->state is now GOT_HEADER */

  gst_buffer_set_caps (buffer,
      GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (h263parse))));

  if (gst_h263_parse_is_delta_unit (&params))
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

out:

  return res;
}
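Example #23
A surfaceflinger-sink variant of Example #21: the same pop-and-reset pattern, guarded by a sentinel buffer value.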
/**
 * gst_overlay_buffer_pool_get:
 * @pool:   the "this" object
 * @blocking:  should this call suspend until there is a buffer available
 *    in the buffer pool?
 *
 * Get an available buffer in the pool
 */
GstOverlayBuffer *
gst_overlay_buffer_pool_get (GstOverlayBufferPool * pool, gboolean blocking)
{
  GstOverlayBuffer *buf;
  GstSurfaceFlingerSink *surfacesink = GST_SURFACEFLINGERSINK (pool->overlayelem);

  if (blocking) {
    buf = g_async_queue_pop (pool->avail_buffers);
  } else {
    buf = g_async_queue_try_pop (pool->avail_buffers);
  }

  if (buf && buf != GST_OVERLAY_BUFFER_SENTINEL) {
    GST_OVERLAY_BUFFER_POOL_LOCK (pool);
    GST_BUFFER_SIZE (buf) = videoflinger_device_get_overlay_buf_length(surfacesink->videodev);
    GST_BUFFER_FLAG_UNSET (buf, 0xffffffff);
    GST_OVERLAY_BUFFER_POOL_UNLOCK (pool);
  }

  pool->running = TRUE;

  return buf;
}
Example #24
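From the iRTSP parser (0.10): the 4-byte interleave header is skipped by adjusting the buffer's data pointer and size in place, and DISCONT is unset on every packet.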
static GstFlowReturn
gst_irtsp_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  /* HACK HACK skip header.
   * could also ask baseparse to skip this,
   * but that would give us a discontinuity for free
   * which is a bit too much to have on all our packets */
  GST_BUFFER_DATA (frame->buffer) += 4;
  GST_BUFFER_SIZE (frame->buffer) -= 4;

  if (!GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (parse))) {
    GstCaps *caps;

    caps = gst_caps_new_simple ("application/x-rtp", NULL);
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);
  }

  GST_BUFFER_FLAG_UNSET (frame->buffer, GST_BUFFER_FLAG_DISCONT);

  return GST_FLOW_OK;

}
Example #25
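From Resin DVD (0.10): changed metadata is copied from the wrapper back onto the wrapped buffer, and READONLY is cleared again unless the wrapper itself was read-only.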
GstBuffer *
rsn_wrappedbuffer_unwrap_and_unref (RsnWrappedBuffer * wrap_buf)
{
  GstBuffer *buf;
  gboolean is_readonly;

  g_return_val_if_fail (wrap_buf != NULL, NULL);
  g_return_val_if_fail (wrap_buf->wrapped_buffer != NULL, NULL);

  buf = gst_buffer_ref (wrap_buf->wrapped_buffer);

  /* Copy changed metadata back to the wrapped buffer from the wrapper,
   * except the read-only flag and the caps. */
  is_readonly = GST_BUFFER_FLAG_IS_SET (wrap_buf, GST_BUFFER_FLAG_READONLY);
  gst_buffer_copy_metadata (buf, GST_BUFFER (wrap_buf),
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
  if (!is_readonly)
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_READONLY);

  gst_buffer_unref (GST_BUFFER (wrap_buf));

  return buf;
}
Example #26
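From the RTP ULP FEC decoder: outgoing buffers get consecutive sequence numbers, and a one-shot unset_discont_flag clears DISCONT on the next buffer through.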
static GstFlowReturn
gst_rtp_ulpfec_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (parent);

  if (G_LIKELY (GST_FLOW_OK == self->chain_return_val)) {
    GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
    buf = gst_buffer_make_writable (buf);

    if (G_UNLIKELY (self->unset_discont_flag)) {
      self->unset_discont_flag = FALSE;
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
    }

    gst_rtp_buffer_map (buf, GST_MAP_WRITE, &rtp);
    gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++);
    gst_rtp_buffer_unmap (&rtp);

    return gst_pad_push (self->srcpad, buf);
  }

  gst_buffer_unref (buf);
  return self->chain_return_val;
}
Example #27
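From the multipart demuxer: an upstream DISCONT is remembered per source pad and replayed on that pad's next outgoing buffer, which otherwise has DISCONT explicitly unset.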
static GstFlowReturn
gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstMultipartDemux *multipart;
  GstAdapter *adapter;
  gint size = 1;
  GstFlowReturn res;

  multipart = GST_MULTIPART_DEMUX (parent);
  adapter = multipart->adapter;

  res = GST_FLOW_OK;

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GSList *l;

    for (l = multipart->srcpads; l != NULL; l = l->next) {
      GstMultipartPad *srcpad = l->data;

      srcpad->discont = TRUE;
    }
    gst_adapter_clear (adapter);
  }
  gst_adapter_push (adapter, buf);

  while (gst_adapter_available (adapter) > 0) {
    GstMultipartPad *srcpad;
    GstBuffer *outbuf;
    gboolean created;
    gint datalen;

    if (G_UNLIKELY (!multipart->header_completed)) {
      if ((size = multipart_parse_header (multipart)) < 0) {
        goto nodata;
      } else {
        gst_adapter_flush (adapter, size);
        multipart->header_completed = TRUE;
      }
    }
    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
      goto nodata;
    }

    /* Invalidate header info */
    multipart->header_completed = FALSE;
    multipart->content_length = -1;

    if (G_UNLIKELY (datalen <= 0)) {
      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
      gst_adapter_flush (adapter, size - datalen);
    } else {
      GstClockTime ts;

      srcpad =
          gst_multipart_find_pad_by_mime (multipart,
          multipart->mime_type, &created);

      ts = gst_adapter_prev_pts (adapter, NULL);
      outbuf = gst_adapter_take_buffer (adapter, datalen);
      gst_adapter_flush (adapter, size - datalen);

      if (created) {
        GstTagList *tags;
        GstSegment segment;

        gst_segment_init (&segment, GST_FORMAT_TIME);

        /* Push new segment, first buffer has 0 timestamp */
        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));

        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
      }

      outbuf = gst_buffer_make_writable (outbuf);
      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
        GST_BUFFER_TIMESTAMP (outbuf) = ts;
        srcpad->last_ts = ts;
      } else {
        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
      }

      if (srcpad->discont) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        srcpad->discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
      }

      GST_DEBUG_OBJECT (multipart,
          "pushing buffer with timestamp %" GST_TIME_FORMAT,
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
      res = gst_pad_push (srcpad->pad, outbuf);
      res = gst_multipart_combine_flows (multipart, srcpad, res);
      if (res != GST_FLOW_OK)
        break;
    }
  }

nodata:
  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
    return GST_FLOW_ERROR;
  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
    return GST_FLOW_EOS;

  return res;
}
Example #28
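From the bml (Buzz machine loader) transform, mono-to-stereo path: silence-only input is processed in write-only mode, and the output's GAP flag is set or unset depending on whether the machine produced data.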
static GstFlowReturn
gst_bml_transform_transform_mono_to_stereo (GstBaseTransform * base,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstMapInfo infoi, infoo;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *datai, *datao, *seg_datai, *seg_datao;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (inbuf));

  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base)) {
    // we would actually need to convert mono to stereo here
    // but this is not even called
    GST_WARNING_OBJECT (bml_transform, "m2s in passthrough mode");
    //return GST_FLOW_OK;
  }

  if (!gst_buffer_map (inbuf, &infoi, GST_MAP_READ)) {
    GST_WARNING_OBJECT (base, "unable to map input buffer for read");
    return GST_FLOW_ERROR;
  }
  datai = (BMLData *) infoi.data;
  samples_per_buffer = infoi.size / sizeof (BMLData);
  if (!gst_buffer_map (outbuf, &infoo, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map output buffer for read & write");
    /* don't leak the input mapping on the error path */
    gst_buffer_unmap (inbuf, &infoi);
    return GST_FLOW_ERROR;
  }
  datao = (BMLData *) infoo.data;

  // some buzzmachines expect a cleared buffer
  //for(i=0;i<samples_per_buffer*2;i++) datao[i]=0.0f;
  memset (datao, 0, samples_per_buffer * 2 * sizeof (BMLData));

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (datai, datai, fc, samples_per_buffer);
  }

  GST_DEBUG_OBJECT (bml_transform, "  calling work_m2s(%d,%d)",
      samples_per_buffer, mode);
  todo = samples_per_buffer;
  seg_datai = datai;
  seg_datao = datao;
  has_data = FALSE;
  while (todo) {
    // 256 is MachineInterface.h::MAX_BUFFER_LENGTH
    seg_size = (todo > 256) ? 256 : todo;
    has_data |= bml (work_m2s (bm, seg_datai, seg_datao, (int) seg_size, mode));
    seg_datai = &seg_datai[seg_size];
    seg_datao = &seg_datao[seg_size * 2];
    todo -= seg_size;
  }
  if (gstbml_fix_data ((GstElement *) bml_transform, &infoo, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (inbuf, &infoi);
  gst_buffer_unmap (outbuf, &infoo);
  return (GST_FLOW_OK);
}
Example #29
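The in-place stereo path of the same bml transform, with the same GAP-flag handling.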
static GstFlowReturn
gst_bml_transform_transform_ip_stereo (GstBaseTransform * base,
    GstBuffer * outbuf)
{
  GstMapInfo info;
  GstBMLTransform *bml_transform = GST_BML_TRANSFORM (base);
  GstBMLTransformClass *klass = GST_BML_TRANSFORM_GET_CLASS (bml_transform);
  GstBML *bml = GST_BML (bml_transform);
  GstBMLClass *bml_class = GST_BML_CLASS (klass);
  BMLData *data, *seg_data;
  gpointer bm = bml->bm;
  guint todo, seg_size, samples_per_buffer;
  gboolean has_data;
  guint mode = 3;               /*WM_READWRITE */

  bml->running_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (outbuf));

  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_DISCONT)) {
    bml->subtick_count = (!bml->reverse) ? bml->subticks_per_tick : 1;
  }

  /* TODO(ensonic): sync on subticks ? */
  if (bml->subtick_count >= bml->subticks_per_tick) {
    bml (gstbml_reset_triggers (bml, bml_class));
    bml (gstbml_sync_values (bml, bml_class, GST_BUFFER_TIMESTAMP (outbuf)));
    bml (tick (bm));
    bml->subtick_count = 1;
  } else {
    bml->subtick_count++;
  }

  /* don't process data in passthrough-mode */
  if (gst_base_transform_is_passthrough (base))
    return GST_FLOW_OK;

  if (!gst_buffer_map (outbuf, &info, GST_MAP_READ | GST_MAP_WRITE)) {
    GST_WARNING_OBJECT (base, "unable to map buffer for read & write");
    return GST_FLOW_ERROR;
  }
  data = (BMLData *) info.data;
  samples_per_buffer = info.size / (sizeof (BMLData) * 2);

  /* if buffer has only silence process with different mode */
  if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP)) {
    mode = 2;                   /* WM_WRITE */
  } else {
    gfloat fc = 32768.0;
    orc_scalarmultiply_f32_ns (data, data, fc, samples_per_buffer * 2);
  }

  GST_DEBUG_OBJECT (bml_transform, "  calling work_m2s(%d,%d)",
      samples_per_buffer, mode);
  todo = samples_per_buffer;
  seg_data = data;
  has_data = FALSE;
  while (todo) {
    // 256 is MachineInterface.h::MAX_BUFFER_LENGTH
    seg_size = (todo > 256) ? 256 : todo;
    // first seg_data can be NULL, it's ignored
    has_data |= bml (work_m2s (bm, seg_data, seg_data, (int) seg_size, mode));
    seg_data = &seg_data[seg_size * 2];
    todo -= seg_size;
  }
  if (gstbml_fix_data ((GstElement *) bml_transform, &info, has_data)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
  } else {
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_GAP);
  }

  gst_buffer_unmap (outbuf, &info);

  return (GST_FLOW_OK);
}
Example #30
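From the Kernel Streaming video source: timestamps are snapped to frame-duration boundaries; a delta of exactly one frame unsets DISCONT, lost frames set it, and an overproduced frame is dropped.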
static gboolean
gst_ks_video_src_timestamp_buffer (GstKsVideoSrc * self, GstBuffer * buf,
    GstClockTime presentation_time)
{
  GstKsVideoSrcPrivate *priv = GST_KS_VIDEO_SRC_GET_PRIVATE (self);
  GstClockTime duration;
  GstClock *clock;
  GstClockTime timestamp;

  duration = gst_ks_video_device_get_duration (priv->device);

  GST_OBJECT_LOCK (self);
  clock = GST_ELEMENT_CLOCK (self);
  if (clock != NULL) {
    gst_object_ref (clock);
    timestamp = GST_ELEMENT (self)->base_time;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      if (presentation_time > GST_ELEMENT (self)->base_time)
        presentation_time -= GST_ELEMENT (self)->base_time;
      else
        presentation_time = 0;
    }
  } else {
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (self);

  if (clock != NULL) {

    /* The time according to the current clock */
    timestamp = gst_clock_get_time (clock) - timestamp;
    if (timestamp > duration)
      timestamp -= duration;
    else
      timestamp = 0;

    if (GST_CLOCK_TIME_IS_VALID (presentation_time)) {
      /*
       * We don't use this for anything yet, need to ponder how to deal
       * with pins that use an internal clock and timestamp from 0.
       */
      GstClockTimeDiff diff = GST_CLOCK_DIFF (presentation_time, timestamp);
      GST_DEBUG_OBJECT (self, "diff between gst and driver timestamp: %"
          G_GINT64_FORMAT, diff);
    }

    gst_object_unref (clock);
    clock = NULL;

    /* Unless it's the first frame, align the current timestamp on a multiple
     * of duration since the previous */
    if (GST_CLOCK_TIME_IS_VALID (priv->prev_ts)) {
      GstClockTime delta;
      guint delta_remainder, delta_offset;

      /* REVISIT: I've seen this happen with the GstSystemClock on Windows,
       *          scary... */
      if (timestamp < priv->prev_ts) {
        GST_INFO_OBJECT (self, "clock is ticking backwards");
        return FALSE;
      }

      /* Round to a duration boundary */
      delta = timestamp - priv->prev_ts;
      delta_remainder = delta % duration;

      if (delta_remainder < duration / 3)
        timestamp -= delta_remainder;
      else
        timestamp += duration - delta_remainder;

      /* How many frames are we off then? */
      delta = timestamp - priv->prev_ts;
      delta_offset = delta / duration;

      if (delta_offset == 1)    /* perfect */
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      else if (delta_offset > 1) {
        guint lost = delta_offset - 1;
        GST_INFO_OBJECT (self, "lost %d frame%s, setting discont flag",
            lost, (lost > 1) ? "s" : "");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      } else if (delta_offset == 0) {   /* overproduction, skip this frame */
        GST_INFO_OBJECT (self, "skipping frame");
        return FALSE;
      }

      priv->offset += delta_offset;
    }

    priv->prev_ts = timestamp;
  }

  GST_BUFFER_OFFSET (buf) = priv->offset;
  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET (buf) + 1;
  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return TRUE;
}