/* Example #1 */
/* Allocate an output buffer for @frame and hand its three planes to the
 * libmpeg2 decoder via mpeg2_set_buf().  The mapped frame is kept mapped
 * and stored with gst_mpeg2dec_save_buffer() so the decoder can write
 * into it; it is not unmapped here.
 *
 * Returns: GST_FLOW_OK on success, an error flow return otherwise. */
static GstFlowReturn
gst_mpeg2dec_alloc_buffer (GstMpeg2dec * mpeg2dec, GstVideoCodecFrame * frame,
    GstBuffer ** buffer)
{
  GstFlowReturn flow;
  GstVideoFrame vframe;
  guint8 *planes[3];

  flow = gst_mpeg2dec_alloc_sized_buf (mpeg2dec, mpeg2dec->decoded_info.size,
      frame, buffer);
  if (G_UNLIKELY (flow != GST_FLOW_OK))
    return flow;

  if (mpeg2dec->need_cropping && mpeg2dec->has_cropping) {
    GstVideoCodecState *state;
    GstVideoCropMeta *crop;

    /* we can do things slightly more efficient when we know that
     * downstream understands clipping */
    state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

    crop = gst_buffer_add_video_crop_meta (frame->output_buffer);
    crop->x = 0;
    crop->y = 0;
    crop->width = state->info.width;
    crop->height = state->info.height;

    gst_video_codec_state_unref (state);
  }

  if (!gst_video_frame_map (&vframe, &mpeg2dec->decoded_info, *buffer,
          GST_MAP_READ | GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (mpeg2dec, "Failed to map frame");
    return GST_FLOW_ERROR;
  }

  planes[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  planes[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
  planes[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

  GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, frame %i",
      planes[0], planes[1], planes[2], frame->system_frame_number);

  /* Note: We use a non-null 'id' value to make the distinction
   * between the dummy buffers (which have an id of NULL) and the
   * ones we did */
  mpeg2_set_buf (mpeg2dec->decoder, planes,
      GINT_TO_POINTER (frame->system_frame_number + 1));
  gst_mpeg2dec_save_buffer (mpeg2dec, frame->system_frame_number, &vframe);

  return flow;
}
/* Example #2 */
/* Attach timestamp, crop, orientation and video metadata to @buffer so
 * it can be pushed downstream as a preview/image frame.  @rect is the
 * visible region reported by the camera HAL; @width/@height/@format
 * describe the full buffer. */
static void
gst_droidcamsrc_dev_prepare_buffer (GstDroidCamSrcDev * dev, GstBuffer * buffer,
    DroidMediaRect rect, int width, int height, GstVideoFormat format)
{
  GstDroidCamSrc *src = GST_DROIDCAMSRC (GST_PAD_PARENT (dev->imgsrc->pad));
  GstVideoCropMeta *crop_meta;
  int crop_w = rect.right - rect.left;
  int crop_h = rect.bottom - rect.top;

  GST_LOG_OBJECT (src, "prepare buffer %" GST_PTR_FORMAT, buffer);

  gst_droidcamsrc_timestamp (src, buffer);

  /* visible region within the full buffer */
  crop_meta = gst_buffer_add_video_crop_meta (buffer);
  crop_meta->x = rect.left;
  crop_meta->y = rect.top;
  crop_meta->width = crop_w;
  crop_meta->height = crop_h;

  gst_buffer_add_gst_buffer_orientation_meta (buffer,
      dev->info->orientation, dev->info->direction);

  gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
      format, width, height);

  GST_LOG_OBJECT (src, "preview info: w=%d, h=%d, crop: x=%d, y=%d, w=%d, h=%d",
      width, height, crop_meta->x, crop_meta->y, crop_meta->width,
      crop_meta->height);
}
/* GstMetaTransformFunction for GstVideoCropMeta.
 *
 * On a plain copy the crop rectangle is duplicated onto @dest; on a
 * scale transform it is rescaled proportionally to the in/out
 * resolution ratio.
 *
 * Returns: TRUE if the transform was handled, FALSE for unsupported
 * transform types.  (Previously unknown types silently returned TRUE,
 * claiming the meta had been transformed when it had not.) */
static gboolean
gst_video_crop_meta_transform (GstBuffer * dest, GstMeta * meta,
    GstBuffer * buffer, GQuark type, gpointer data)
{
  GstVideoCropMeta *dmeta, *smeta;

  if (GST_META_TRANSFORM_IS_COPY (type)) {
    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    GST_DEBUG ("copy crop metadata");
    dmeta->x = smeta->x;
    dmeta->y = smeta->y;
    dmeta->width = smeta->width;
    dmeta->height = smeta->height;
  } else if (GST_VIDEO_META_TRANSFORM_IS_SCALE (type)) {
    GstVideoMetaTransform *trans = data;
    gint ow, oh, nw, nh;

    smeta = (GstVideoCropMeta *) meta;
    dmeta = gst_buffer_add_video_crop_meta (dest);

    ow = GST_VIDEO_INFO_WIDTH (trans->in_info);
    nw = GST_VIDEO_INFO_WIDTH (trans->out_info);
    oh = GST_VIDEO_INFO_HEIGHT (trans->in_info);
    nh = GST_VIDEO_INFO_HEIGHT (trans->out_info);

    /* scale offset and size by the output/input resolution ratio */
    GST_DEBUG ("scaling crop metadata %dx%d -> %dx%d", ow, oh, nw, nh);
    dmeta->x = (smeta->x * nw) / ow;
    dmeta->y = (smeta->y * nh) / oh;
    dmeta->width = (smeta->width * nw) / ow;
    dmeta->height = (smeta->height * nh) / oh;
    GST_DEBUG ("crop offset %dx%d -> %dx%d", smeta->x, smeta->y, dmeta->x,
        dmeta->y);
    GST_DEBUG ("crop size   %dx%d -> %dx%d", smeta->width, smeta->height,
        dmeta->width, dmeta->height);
  } else {
    /* transform type not supported: report failure so the meta is
     * dropped instead of being propagated untransformed */
    return FALSE;
  }
  return TRUE;
}
/* Example #4 */
/* Wraps the Linux framebuffer identified by @framebuffer_fd in a GstBuffer.
 *
 * The produced buffer contains only metadata, no memory blocks - the IPU sink
 * does not need anything more.  A video meta describes the full framebuffer;
 * if @width and @height are both nonzero, a crop meta restricts the region of
 * interest to the rectangle (@x, @y, @width, @height).  The framebuffer's
 * physical address is exposed through a GstImxPhysMemMeta.
 *
 * Returns: a new GstBuffer, or NULL if the framebuffer ioctls fail.
 * TODO: add some logic to wrap the framebuffer memory block, including
 * map/unmap code etc. */
GstBuffer* gst_imx_ipu_blitter_wrap_framebuffer(GstImxIpuBlitter *ipu_blitter, int framebuffer_fd, guint x, guint y, guint width, guint height)
{
	guint fb_width, fb_height;
	GstVideoFormat fb_format;
	GstBuffer *buffer;
	GstImxPhysMemMeta *phys_mem_meta;
	struct fb_var_screeninfo fb_var;
	struct fb_fix_screeninfo fb_fix;

	if (ioctl(framebuffer_fd, FBIOGET_FSCREENINFO, &fb_fix) == -1)
	{
		/* fixed info is needed below for the physical address (smem_start) */
		GST_ERROR_OBJECT(ipu_blitter, "could not get fixed screen info: %s", strerror(errno));
		return NULL;
	}

	if (ioctl(framebuffer_fd, FBIOGET_VSCREENINFO, &fb_var) == -1)
	{
		/* variable info provides resolution and pixel format */
		GST_ERROR_OBJECT(ipu_blitter, "could not get variable screen info: %s", strerror(errno));
		return NULL;
	}

	fb_width = fb_var.xres;
	fb_height = fb_var.yres;
	fb_format = gst_imx_ipu_blitter_get_format_from_fb(ipu_blitter, &fb_var, &fb_fix);

	GST_DEBUG_OBJECT(ipu_blitter, "framebuffer resolution is %u x %u", fb_width, fb_height);

	buffer = gst_buffer_new();
	gst_buffer_add_video_meta(buffer, GST_VIDEO_FRAME_FLAG_NONE, fb_format, fb_width, fb_height);

	if ((width != 0) && (height != 0))
	{
		GstVideoCropMeta *video_crop_meta;

		video_crop_meta = gst_buffer_add_video_crop_meta(buffer);
		video_crop_meta->x = x;
		video_crop_meta->y = y;
		video_crop_meta->width = width;
		video_crop_meta->height = height;
	}

	phys_mem_meta = GST_IMX_PHYS_MEM_META_ADD(buffer);
	phys_mem_meta->phys_addr = (guintptr)(fb_fix.smem_start);

	return buffer;
}
/* Allocate an output buffer for @frame and copy the decoded libtheora
 * image @buf into it plane by plane.  If downstream cannot handle crop
 * metadata we copy only the picture region (cropping by hand); otherwise
 * we copy the full coded frame and attach a GstVideoCropMeta. */
static GstFlowReturn
theora_handle_image (GstTheoraDec * dec, th_ycbcr_buffer buf,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (dec);
  gint width, height, stride;
  GstFlowReturn result;
  gint i, comp;
  guint8 *dest, *src;
  GstVideoFrame vframe;
  gint pic_width, pic_height;
  gint offset_x, offset_y;

  result = gst_video_decoder_allocate_output_frame (decoder, frame);

  if (G_UNLIKELY (result != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (dec, "could not get buffer, reason: %s",
        gst_flow_get_name (result));
    return result;
  }

  if (!dec->can_crop) {
    /* we need to crop the hard way: copy only the visible picture
     * region out of the coded frame */
    offset_x = dec->info.pic_x;
    offset_y = dec->info.pic_y;
    pic_width = dec->info.pic_width;
    pic_height = dec->info.pic_height;
    /* Ensure correct offsets in chroma for formats that need it
     * by rounding the offset. libtheora will add proper pixels,
     * so no need to handle them ourselves. */
    if (offset_x & 1 && dec->info.pixel_fmt != TH_PF_444)
      offset_x--;
    if (offset_y & 1 && dec->info.pixel_fmt == TH_PF_420)
      offset_y--;
  } else {
    /* copy the whole frame; downstream will honour the crop meta */
    offset_x = 0;
    offset_y = 0;
    pic_width = dec->info.frame_width;
    pic_height = dec->info.frame_height;

    /* only attach crop meta when the picture region actually differs
     * from the coded frame */
    if (dec->info.pic_width != dec->info.frame_width ||
        dec->info.pic_height != dec->info.frame_height ||
        dec->info.pic_x != 0 || dec->info.pic_y != 0) {
      GstVideoMeta *vmeta;
      GstVideoCropMeta *cmeta;

      vmeta = gst_buffer_get_video_meta (frame->output_buffer);
      /* If the buffer pool didn't add the meta already
       * we add it ourselves here */
      if (!vmeta)
        vmeta = gst_buffer_add_video_meta (frame->output_buffer,
            GST_VIDEO_FRAME_FLAG_NONE,
            dec->output_state->info.finfo->format,
            dec->info.frame_width, dec->info.frame_height);

      /* Just to be sure that the buffer pool doesn't do something
       * completely weird and we would crash later
       */
      g_assert (vmeta->format == dec->output_state->info.finfo->format);
      g_assert (vmeta->width == dec->info.frame_width);
      g_assert (vmeta->height == dec->info.frame_height);

      cmeta = gst_buffer_add_video_crop_meta (frame->output_buffer);

      /* we can do things slightly more efficient when we know that
       * downstream understands clipping */
      cmeta->x = dec->info.pic_x;
      cmeta->y = dec->info.pic_y;
      cmeta->width = dec->info.pic_width;
      cmeta->height = dec->info.pic_height;
    }
  }

  /* if only libtheora would allow us to give it a destination frame */
  GST_CAT_TRACE_OBJECT (GST_CAT_PERFORMANCE, dec,
      "doing unavoidable video frame copy");

  if (G_UNLIKELY (!gst_video_frame_map (&vframe, &dec->output_state->info,
              frame->output_buffer, GST_MAP_WRITE)))
    goto invalid_frame;

  /* copy each component, scaling the copy region for chroma subsampling */
  for (comp = 0; comp < 3; comp++) {
    width =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vframe.info.finfo, comp, pic_width);
    height =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vframe.info.finfo, comp,
        pic_height);
    stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, comp);
    dest = GST_VIDEO_FRAME_COMP_DATA (&vframe, comp);

    /* advance to the crop origin in the source plane; halve the offset
     * for subsampled components */
    src = buf[comp].data;
    src += ((height == pic_height) ? offset_y : offset_y / 2)
        * buf[comp].stride;
    src += (width == pic_width) ? offset_x : offset_x / 2;

    for (i = 0; i < height; i++) {
      memcpy (dest, src, width);

      dest += stride;
      src += buf[comp].stride;
    }
  }
  gst_video_frame_unmap (&vframe);

  return GST_FLOW_OK;

  /* ERRORS */
invalid_frame:
  {
    GST_DEBUG_OBJECT (dec, "could not map video frame");
    return GST_FLOW_ERROR;
  }
}
/* Push a decoded frame downstream: attach the VA-API surface proxy to the
 * output buffer's vaapi meta, propagate interlace/corruption flags and the
 * surface's crop rectangle, then finish the frame on the base decoder.
 * Decode-only frames skip buffer creation and are finished directly.
 * @out_frame's reference is consumed on every path. */
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  guint flags, out_flags = 0;

  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    /* the surface proxy was stashed on the frame during decoding */
    proxy = gst_video_codec_frame_get_user_data (out_frame);

    /* reconfigure if un-cropped surface resolution changed */
    if (is_surface_resolution_changed (vdec, GST_VAAPI_SURFACE_PROXY_SURFACE (proxy)))
      gst_vaapidecode_negotiate (decode);

    /* release callback returns the surface to the decoder's pool */
    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    ret = gst_video_decoder_allocate_output_frame (vdec, out_frame);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
    if (!meta)
      goto error_get_meta;
    gst_vaapi_video_meta_set_surface_proxy (meta, proxy);

    /* translate surface proxy flags into GstBuffer/video flags */
    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

#if GST_CHECK_VERSION(1,5,0)
    /* First-in-bundle flag only appeared in 1.5 dev */
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#endif

    /* attach the surface's crop rectangle, if any, as crop meta */
    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (out_frame->output_buffer);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;

  gst_video_codec_frame_unref (out_frame);
  return GST_FLOW_OK;

  /* ERRORS */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
}
/* Example #7 */
/* Fill @outbuf with the decoded MFC output @mfc_outbuf, converting it via
 * FIMC.  Two paths exist: a zerocopy path where FIMC writes directly into
 * the (physically contiguous) output buffer planes, and an mmap path where
 * FIMC converts into its own mmap'ed buffers which are then memcpy'ed into
 * @outbuf.  Cropping is attached as crop meta in the zerocopy path and
 * performed by FIMC itself in the mmap path. */
static GstFlowReturn
gst_mfc_dec_fill_outbuf (GstMFCDec * self, GstBuffer * outbuf,
    struct mfc_buffer *mfc_outbuf, GstVideoCodecState * state)
{
  GstFlowReturn ret = GST_FLOW_OK;
  const guint8 *mfc_outbuf_comps[3] = { NULL, };
  gint i, j, h, w, src_stride, dst_stride;
  guint8 *dst_, *src_;
  GstVideoFrame vframe;
  Fimc *fimc = self->fimc;
  gboolean zerocopy, has_cropping;

  /* vframe.buffer doubles as "is mapped" flag for the cleanup path */
  memset (&vframe, 0, sizeof (vframe));

  zerocopy = TRUE;
  /* FIXME: Not 100% correct, we need the memory of each
   * plane to be contiguous at least */
  if (GST_VIDEO_INFO_N_PLANES (&state->info) > gst_buffer_n_memory (outbuf)) {
    zerocopy = FALSE;
  } else {
    gint n = gst_buffer_n_memory (outbuf);

    for (i = 0; i < n; i++) {
      GstMemory *mem = gst_buffer_peek_memory (outbuf, i);

      if (!GST_MEMORY_IS_PHYSICALLY_CONTIGUOUS (mem)) {
        zerocopy = FALSE;
        break;
      }
    }
  }

  /* cropping only matters when the coded size differs from the crop size */
  has_cropping = self->has_cropping && (self->width != self->crop_width
      || self->height != self->crop_height);

  /* We only do cropping if we do zerocopy and downstream
   * supports cropping. For non-zerocopy we can do cropping
   * more efficient.
   * We can't do cropping ourself with zerocopy because
   * FIMC returns EFAULT when queueing the destination
   * buffers
   */
  if (zerocopy && has_cropping) {
    GstVideoCropMeta *crop;

    crop = gst_buffer_add_video_crop_meta (outbuf);
    crop->x = self->crop_left;
    crop->y = self->crop_top;
    crop->width = self->crop_width;
    crop->height = self->crop_height;
  }

  if (!gst_video_frame_map (&vframe, &state->info, outbuf, GST_MAP_WRITE))
    goto frame_map_error;

  mfc_buffer_get_output_data (mfc_outbuf, (void **) &mfc_outbuf_comps[0],
      (void **) &mfc_outbuf_comps[1]);

  if (zerocopy && (has_cropping || (self->width == self->crop_width
              && self->height == self->crop_height))) {
    void *dst[3];

    /* (re)configure FIMC for direct output when switching away from
     * mmap mode or when FIMC was not created yet */
    if (self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      fimc = self->fimc;

      if (self->format == GST_VIDEO_FORMAT_NV12) {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[2] = 0;
      } else {
        self->dst_stride[0] = GST_ROUND_UP_4 (self->width);
        self->dst_stride[1] = GST_ROUND_UP_4 ((self->width + 1) / 2);
        self->dst_stride[2] = GST_ROUND_UP_4 ((self->width + 1) / 2);
      }

      /* with crop meta attached, FIMC outputs the full frame; otherwise
       * FIMC is told to crop while converting */
      if (has_cropping) {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, 0, 0, self->width,
                self->height) < 0)
          goto fimc_dst_error;
      } else {
        if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
                self->height, self->dst_stride, self->crop_left,
                self->crop_top, self->crop_width, self->crop_height) < 0)
          goto fimc_dst_error;
      }
      self->mmap = FALSE;

      if (fimc_request_dst_buffers (fimc) < 0)
        goto fimc_dst_requestbuffers_error;

      self->dst[0] = NULL;
      self->dst[1] = NULL;
      self->dst[2] = NULL;
    }

    dst[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    dst[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
    if (self->format == GST_VIDEO_FORMAT_NV12)
      dst[2] = NULL;
    else
      dst[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps, (void **) dst) < 0)
      goto fimc_convert_error;
  } else {
    /* mmap path: FIMC converts into its own buffers, then we copy */
    if (!self->mmap || !self->fimc) {
      if (!gst_mfc_dec_create_fimc (self, state))
        goto fimc_create_error;

      self->dst_stride[0] = 0;
      self->dst_stride[1] = 0;
      self->dst_stride[2] = 0;
      self->mmap = TRUE;
      fimc = self->fimc;
    }

    if (!self->dst[0]) {
      if (fimc_set_dst_format (fimc, self->fimc_format, self->width,
              self->height, self->dst_stride, self->crop_left,
              self->crop_top, self->crop_width, self->crop_height) < 0)
        goto fimc_dst_error;

      if (fimc_request_dst_buffers_mmap (fimc, self->dst, self->dst_stride) < 0)
        goto fimc_dst_requestbuffers_error;
    }

    if (fimc_convert (fimc, (void **) mfc_outbuf_comps,
            (void **) self->dst) < 0)
      goto fimc_convert_error;

    /* copy the converted planes from the FIMC mmap buffers into outbuf */
    switch (state->info.finfo->format) {
      case GST_VIDEO_FORMAT_RGBx:
        dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, 0);
        src_ = self->dst[0];
        src_stride = self->dst_stride[0];
        h = GST_VIDEO_FRAME_HEIGHT (&vframe);
        w = GST_VIDEO_FRAME_WIDTH (&vframe);
        dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, 0);
        for (i = 0; i < h; i++) {
          memcpy (dst_, src_, w);
          dst_ += dst_stride;
          src_ += src_stride;
        }
        break;
      case GST_VIDEO_FORMAT_I420:
      case GST_VIDEO_FORMAT_YV12:
        for (j = 0; j < 3; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j);
          dst_stride = GST_VIDEO_FRAME_COMP_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      case GST_VIDEO_FORMAT_NV12:
        /* plane 1 interleaves Cb/Cr, hence the doubled copy width */
        for (j = 0; j < 2; j++) {
          dst_ = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, j);
          src_ = self->dst[j];
          src_stride = self->dst_stride[j];
          h = GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, j);
          w = GST_VIDEO_FRAME_COMP_WIDTH (&vframe, j) * (j == 0 ? 1 : 2);
          dst_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, j);
          for (i = 0; i < h; i++) {
            memcpy (dst_, src_, w);
            dst_ += dst_stride;
            src_ += src_stride;
          }
        }
        break;
      default:
        g_assert_not_reached ();
        break;
    }
  }

done:
  /* unmap only if the map above succeeded */
  if (vframe.buffer)
    gst_video_frame_unmap (&vframe);

  return ret;

frame_map_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to map output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_create_error:
  {
    /* gst_mfc_dec_create_fimc already posted an error */
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to set FIMC destination parameters"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_dst_requestbuffers_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to request FIMC destination buffers"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fimc_convert_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to convert via FIMC"), (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
/* GstBaseTransform transform vfunc: run the MFX VPP filter on @inbuf's
 * surface and write the result into @outbuf.  When frame-rate conversion
 * (FRC) is active the filter can emit multiple output surfaces per input,
 * so extra buffers are created and pushed directly from the loop; the
 * final surface always lands in @outbuf. */
static GstFlowReturn
gst_mfxpostproc_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstMfxPostproc *const vpp = GST_MFXPOSTPROC (trans);
  GstMfxVideoMeta *inbuf_meta, *outbuf_meta;
  GstMfxSurface *surface, *out_surface;
  GstMfxFilterStatus status = GST_MFX_FILTER_STATUS_SUCCESS;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buf = NULL;
  GstMfxRectangle *crop_rect = NULL;
  GstClockTime timestamp;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  ret = gst_mfx_plugin_base_get_input_buffer (GST_MFX_PLUGIN_BASE (vpp),
          inbuf, &buf);
  if (GST_FLOW_OK != ret)
    return ret;

  inbuf_meta = gst_buffer_get_mfx_video_meta (buf);
  surface = gst_mfx_video_meta_get_surface (inbuf_meta);
  if (!surface)
    goto error_create_surface;

  /* loop while the filter has more output surfaces for this input
   * (MORE_SURFACE is returned during frame-rate conversion) */
  do {
    if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
      /* on the first iteration drop our ref to the input buffer;
       * later iterations already pushed the previous output */
      if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE != status)
        gst_buffer_replace (&buf, NULL);
      buf = create_output_buffer (vpp);
      if (!buf)
        goto error_create_buffer;
    }

    status = gst_mfx_filter_process (vpp->filter, surface, &out_surface);
    if (GST_MFX_FILTER_STATUS_SUCCESS != status
        && GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE != status
        && GST_MFX_FILTER_STATUS_ERROR_MORE_DATA != status)
      goto error_process_vpp;

    /* intermediate FRC surfaces go into @buf, the last one into @outbuf */
    if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status)
      outbuf_meta = gst_buffer_get_mfx_video_meta (buf);
    else
      outbuf_meta = gst_buffer_get_mfx_video_meta (outbuf);

    if (!outbuf_meta)
      goto error_create_meta;

    gst_mfx_video_meta_set_surface (outbuf_meta, out_surface);
    crop_rect = gst_mfx_surface_get_crop_rect (out_surface);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (outbuf);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }

    /* filter needs another input frame before producing output */
    if (GST_MFX_FILTER_STATUS_ERROR_MORE_DATA == status) {
      gst_buffer_unref (buf);
      return GST_BASE_TRANSFORM_FLOW_DROPPED;
    }

    if (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status) {
      /* push intermediate FRC frame ourselves, spaced by field duration */
      GST_BUFFER_TIMESTAMP (buf) = timestamp;
      GST_BUFFER_DURATION (buf) = vpp->field_duration;
      timestamp += vpp->field_duration;
      ret = gst_pad_push (trans->srcpad, buf);
    }
    else {
      if (vpp->flags & GST_MFX_POSTPROC_FLAG_FRC) {
        GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
        GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
      }
      else {
        gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      }
    }
  } while (GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE == status
           && GST_FLOW_OK == ret);

  gst_mfx_surface_dequeue(surface);

#if GST_CHECK_VERSION(1,8,0)
  gst_mfx_plugin_base_export_dma_buffer (GST_MFX_PLUGIN_BASE (vpp), outbuf);
#endif // GST_CHECK_VERSION

  gst_buffer_unref (buf);
  return ret;
  /* ERRORS */
error_create_buffer:
  {
    GST_ERROR ("failed to output buffer");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_create_meta:
  {
    GST_ERROR ("failed to create new output buffer meta");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_create_surface:
  {
    GST_ERROR ("failed to create surface surface from buffer");
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
error_process_vpp:
  {
    GST_ERROR ("failed to apply VPP (error %d)", status);
    gst_buffer_unref (buf);
    return GST_FLOW_ERROR;
  }
}