static void
gst_core_media_meta_free (GstCoreMediaMeta * meta, GstBuffer * buf)
{
  if (meta->pixel_buf != NULL) {
    GstCVApi *cv = meta->ctx->cv;

    /* release the read-only lock taken when the buffer was wrapped in
     * gst_core_media_buffer_new () */
    cv->CVPixelBufferUnlockBaseAddress (meta->pixel_buf,
        kCVPixelBufferLock_ReadOnly);
  }

  meta->ctx->cm->FigSampleBufferRelease (meta->sample_buf);
  g_object_unref (meta->ctx);
}

static void
gst_core_video_meta_free (GstCoreVideoMeta * meta, GstBuffer * buf)
{
  GstCVApi *cv = meta->ctx->cv;

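  /* release the read-only lock taken in gst_core_video_buffer_new () */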
  if (meta->pixbuf != NULL) {
    cv->CVPixelBufferUnlockBaseAddress (meta->pixbuf,
        kCVPixelBufferLock_ReadOnly);
  }

  cv->CVBufferRelease (meta->cvbuf);
  g_object_unref (meta->ctx);
}

/* Example #3 */

static void
gst_core_video_buffer_finalize (GstMiniObject * mini_object)
{
  GstCoreVideoBuffer *self = GST_CORE_VIDEO_BUFFER_CAST (mini_object);
  GstCVApi *cv = self->ctx->cv;

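  /* release the read-only lock taken in gst_core_video_buffer_new () */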
  if (self->pixbuf != NULL) {
    cv->CVPixelBufferUnlockBaseAddress (self->pixbuf,
        kCVPixelBufferLock_ReadOnly);
  }

  cv->CVBufferRelease (self->cvbuf);

  g_object_unref (self->ctx);

  GST_MINI_OBJECT_CLASS (gst_core_video_buffer_parent_class)->finalize
      (mini_object);
}

/* Example #4 */

GstBuffer *
gst_core_video_buffer_new (GstCoreMediaCtx * ctx, CVBufferRef cvbuf)
{
  GstCVApi *cv = ctx->cv;
  void *data;
  size_t size;
  CVPixelBufferRef pixbuf = NULL;
  GstCoreVideoBuffer *buf;

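  /* only CVPixelBuffer-backed buffers are supported: lock the pixels so
   * they can be exposed as the buffer data */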
  if (CFGetTypeID (cvbuf) == cv->CVPixelBufferGetTypeID ()) {
    pixbuf = (CVPixelBufferRef) cvbuf;

    if (cv->CVPixelBufferLockBaseAddress (pixbuf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }
    data = cv->CVPixelBufferGetBaseAddress (pixbuf);
    size = cv->CVPixelBufferGetBytesPerRow (pixbuf) *
        cv->CVPixelBufferGetHeight (pixbuf);
  } else {
    /* TODO: Do we need to handle other buffer types? */
    goto error;
  }

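  /* wrap the locked pixel data, keeping references to the context and the
   * underlying CVBuffer */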
  buf = GST_CORE_VIDEO_BUFFER_CAST (gst_mini_object_new
      (GST_TYPE_CORE_VIDEO_BUFFER));
  buf->ctx = g_object_ref (ctx);
  buf->cvbuf = cv->CVBufferRetain (cvbuf);
  buf->pixbuf = pixbuf;

  GST_BUFFER_DATA (buf) = data;
  GST_BUFFER_SIZE (buf) = size;

  return GST_BUFFER_CAST (buf);

error:
  return NULL;
}

GstBuffer *
gst_core_media_buffer_new (GstCoreMediaCtx * ctx, CMSampleBufferRef sample_buf)
{
  GstCVApi *cv = ctx->cv;
  GstCMApi *cm = ctx->cm;
  CVImageBufferRef image_buf;
  CVPixelBufferRef pixel_buf;
  CMBlockBufferRef block_buf;
  Byte *data = NULL;
  UInt32 size;
  OSStatus status;
  GstBuffer *buf;
  GstCoreMediaMeta *meta;

  image_buf = cm->CMSampleBufferGetImageBuffer (sample_buf);
  pixel_buf = NULL;
  block_buf = cm->CMSampleBufferGetDataBuffer (sample_buf);

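  /* raw samples carry a CVPixelBuffer, compressed samples a CMBlockBuffer */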
  if (image_buf != NULL &&
      CFGetTypeID (image_buf) == cv->CVPixelBufferGetTypeID ()) {
    pixel_buf = (CVPixelBufferRef) image_buf;

    if (cv->CVPixelBufferLockBaseAddress (pixel_buf,
            kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
      goto error;
    }

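    /* planar data: the size is the sum of all plane sizes, which assumes the
     * planes are laid out contiguously after the first plane's base address */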
    if (cv->CVPixelBufferIsPlanar (pixel_buf)) {
      gint plane_count, plane_idx;

      data = cv->CVPixelBufferGetBaseAddressOfPlane (pixel_buf, 0);

      size = 0;
      plane_count = cv->CVPixelBufferGetPlaneCount (pixel_buf);
      for (plane_idx = 0; plane_idx != plane_count; plane_idx++) {
        size += cv->CVPixelBufferGetBytesPerRowOfPlane (pixel_buf, plane_idx) *
            cv->CVPixelBufferGetHeightOfPlane (pixel_buf, plane_idx);
      }
    } else {
      data = cv->CVPixelBufferGetBaseAddress (pixel_buf);
      size = cv->CVPixelBufferGetBytesPerRow (pixel_buf) *
          cv->CVPixelBufferGetHeight (pixel_buf);
    }
  } else if (block_buf != NULL) {
    status = cm->CMBlockBufferGetDataPointer (block_buf, 0, 0, 0, &data);
    if (status != noErr)
      goto error;
    size = cm->CMBlockBufferGetDataLength (block_buf);
  } else {
    goto error;
  }

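  /* attach a GstCoreMediaMeta so the Core Media references are dropped
   * when the buffer is freed */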
  buf = gst_buffer_new ();

  meta = (GstCoreMediaMeta *) gst_buffer_add_meta (buf,
      gst_core_media_meta_get_info (), NULL);
  meta->ctx = g_object_ref (ctx);
  meta->sample_buf = cm->FigSampleBufferRetain (sample_buf);
  meta->image_buf = image_buf;
  meta->pixel_buf = pixel_buf;
  meta->block_buf = block_buf;

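  /* expose the sample data without copying; it remains owned by the
   * Core Media buffers referenced by the meta */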
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

  return buf;

error:
  return NULL;
}

/* Example #6 */

static GstFlowReturn
gst_vtenc_encode_frame (GstVTEnc * self, GstBuffer * buf)
{
  GstCVApi *cv = self->ctx->cv;
  GstVTApi *vt = self->ctx->vt;
  CMTime ts, duration;
  CVPixelBufferRef pbuf = NULL;
  VTStatus vt_status;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  self->cur_inbuf = buf;

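  /* express timestamp and duration as CMTime with a millisecond timescale */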
  ts = self->ctx->cm->CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_TIMESTAMP (buf)), 1000);
  duration = self->ctx->cm->CMTimeMake
      (GST_TIME_AS_MSECONDS (GST_BUFFER_DURATION (buf)), 1000);

  if (GST_IS_CORE_MEDIA_BUFFER (buf)) {
    GstCoreMediaBuffer *cmbuf = GST_CORE_MEDIA_BUFFER_CAST (buf);
    pbuf = gst_core_media_buffer_get_pixel_buffer (cmbuf);
  }

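  /* no Core Media pixel buffer: wrap the raw buffer data in a CVPixelBuffer;
   * the extra ref taken below is dropped by the release callback */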
  if (pbuf == NULL) {
    CVReturn cv_ret;

    cv_ret = cv->CVPixelBufferCreateWithBytes (NULL,
        self->negotiated_width, self->negotiated_height,
        kCVPixelFormatType_422YpCbCr8Deprecated, GST_BUFFER_DATA (buf),
        self->negotiated_width * 2,
        (CVPixelBufferReleaseBytesCallback) gst_buffer_unref, buf, NULL, &pbuf);
    if (cv_ret != kCVReturnSuccess)
      goto cv_error;
    gst_buffer_ref (buf);
  }

  GST_OBJECT_LOCK (self);

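  /* a forced keyframe invalidates the caps cached for downstream */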
  self->expect_keyframe = CFDictionaryContainsKey (self->options,
      *(vt->kVTEncodeFrameOptionKey_ForceKeyFrame));
  if (self->expect_keyframe)
    gst_vtenc_clear_cached_caps_downstream (self);

  vt_status = self->ctx->vt->VTCompressionSessionEncodeFrame (self->session,
      pbuf, ts, duration, self->options, NULL, NULL);

  if (vt_status != 0) {
    GST_WARNING_OBJECT (self, "VTCompressionSessionEncodeFrame returned %d",
        vt_status);
  }

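  /* wait for the encoder to emit all pending frames before collecting
   * the output below */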
  self->ctx->vt->VTCompressionSessionCompleteFrames (self->session,
      *(self->ctx->cm->kCMTimeInvalid));

  GST_OBJECT_UNLOCK (self);

  cv->CVPixelBufferRelease (pbuf);
  self->cur_inbuf = NULL;
  gst_buffer_unref (buf);

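  /* negotiate downstream caps from the first encoded sample buffer, then
   * push (or drop) the output collected by the encoder callback */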
  if (self->cur_outbufs->len > 0) {
    GstCoreMediaBuffer *cmbuf =
        GST_CORE_MEDIA_BUFFER_CAST (g_ptr_array_index (self->cur_outbufs, 0));
    if (!gst_vtenc_negotiate_downstream (self, cmbuf->sample_buf))
      ret = GST_FLOW_NOT_NEGOTIATED;
  }

  for (i = 0; i != self->cur_outbufs->len; i++) {
    GstBuffer *buf = g_ptr_array_index (self->cur_outbufs, i);

    if (ret == GST_FLOW_OK) {
      gst_buffer_set_caps (buf, GST_PAD_CAPS (self->srcpad));
      ret = gst_pad_push (self->srcpad, buf);
    } else {
      gst_buffer_unref (buf);
    }
  }
  g_ptr_array_set_size (self->cur_outbufs, 0);

  return ret;

cv_error:
  {
    self->cur_inbuf = NULL;
    gst_buffer_unref (buf);

    return GST_FLOW_ERROR;
  }
}

GstBuffer *
gst_core_video_buffer_new (GstCoreMediaCtx * ctx, CVBufferRef cvbuf,
    GstVideoInfo * vinfo)
{
  GstCVApi *cv = ctx->cv;
  void *data;
  size_t size;
  CVPixelBufferRef pixbuf = NULL;
  GstBuffer *buf;
  GstCoreVideoMeta *meta;
  guint width, height, n_planes, i;
  gsize offset[GST_VIDEO_MAX_PLANES];
  gint stride[GST_VIDEO_MAX_PLANES];

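  /* only CVPixelBuffer-backed buffers are supported; the lock taken here is
   * released when the GstCoreVideoMeta is freed */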
  if (CFGetTypeID (cvbuf) != cv->CVPixelBufferGetTypeID ())
    /* TODO: Do we need to handle other buffer types? */
    goto error;

  pixbuf = (CVPixelBufferRef) cvbuf;

  if (cv->CVPixelBufferLockBaseAddress (pixbuf,
          kCVPixelBufferLock_ReadOnly) != kCVReturnSuccess) {
    goto error;
  }

  buf = gst_buffer_new ();

  /* add the corevideo meta to free the underlying corevideo buffer */
  meta = (GstCoreVideoMeta *) gst_buffer_add_meta (buf,
      gst_core_video_meta_get_info (), NULL);
  meta->ctx = g_object_ref (ctx);
  meta->cvbuf = cv->CVBufferRetain (cvbuf);
  meta->pixbuf = pixbuf;

  /* set stride, offset and size */
  memset (&offset, 0, sizeof (offset));
  memset (&stride, 0, sizeof (stride));

  data = cv->CVPixelBufferGetBaseAddress (pixbuf);
  height = cv->CVPixelBufferGetHeight (pixbuf);
  if (cv->CVPixelBufferIsPlanar (pixbuf)) {
    GstVideoInfo tmp_vinfo;

    n_planes = cv->CVPixelBufferGetPlaneCount (pixbuf);
    for (i = 0; i < n_planes; ++i)
      stride[i] = cv->CVPixelBufferGetBytesPerRowOfPlane (pixbuf, i);

    /* FIXME: don't hardcode NV12 */
    gst_video_info_init (&tmp_vinfo);
    gst_video_info_set_format (&tmp_vinfo,
        GST_VIDEO_FORMAT_NV12, stride[0], height);
    offset[1] = tmp_vinfo.offset[1];
    size = tmp_vinfo.size;
  } else {
    n_planes = 1;
    size = cv->CVPixelBufferGetBytesPerRow (pixbuf) * height;
  }

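  /* wrap the pixel data without copying; the lock and references are
   * released when the meta is freed */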
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE, data,
          size, 0, size, NULL, NULL));

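  /* describe the plane layout for downstream elements that understand
   * GstVideoMeta */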
  if (vinfo) {
    width = vinfo->width;

    gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_FORMAT_NV12, width, height, n_planes, offset, stride);
  }

  return buf;

error:
  return NULL;
}
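
/*
 * Hypothetical usage sketch (not part of the original sources): wrapping a
 * CVPixelBufferRef delivered by a capture callback and pushing it downstream.
 * The names push_pixel_buffer, srcpad, ctx and vinfo are assumptions for
 * illustration; only gst_core_video_buffer_new () and gst_pad_push () come
 * from the code above and GStreamer itself.
 */
static GstFlowReturn
push_pixel_buffer (GstPad * srcpad, GstCoreMediaCtx * ctx,
    CVPixelBufferRef pixbuf, GstVideoInfo * vinfo)
{
  GstBuffer *buf;

  /* gst_core_video_buffer_new () retains the CVBuffer, locks its base
   * address and wraps the pixels without copying */
  buf = gst_core_video_buffer_new (ctx, (CVBufferRef) pixbuf, vinfo);
  if (buf == NULL)
    return GST_FLOW_ERROR;

  /* ownership of buf passes to the pad; the CVPixelBuffer is released
   * when the GstCoreVideoMeta is freed */
  return gst_pad_push (srcpad, buf);
}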