Example #1
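Builds the VdpPictureInfoMPEG4Part2 structure for the VDPAU MPEG-4 decoder from the parsed video object layer (VOL) and the current video object plane (VOP): reference surfaces, quantizer matrices, B-VOP temporal distances and the per-VOP coding flags.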
static VdpPictureInfoMPEG4Part2
gst_vdp_mpeg4_dec_fill_info (GstVdpMpeg4Dec * mpeg4_dec,
    GstMpeg4Frame * mpeg4_frame, Mpeg4VideoObjectPlane * vop)
{
  Mpeg4VideoObjectLayer *vol;
  VdpPictureInfoMPEG4Part2 info;

  vol = &mpeg4_dec->vol;

  info.forward_reference = VDP_INVALID_HANDLE;
  info.backward_reference = VDP_INVALID_HANDLE;

  /* forward reference */
  if (vop->coding_type != I_VOP && mpeg4_dec->f_frame) {
    info.forward_reference =
        GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->
            f_frame)->src_buffer)->surface;
  }

  if (vop->coding_type == B_VOP && mpeg4_dec->f_frame && mpeg4_dec->b_frame) {
    guint32 trd_time, trb_time;

    /* trd/trb are the temporal distances used for B-VOP direct mode:
     * trd between the two reference VOPs, trb between this VOP and the
     * past reference */
    trd_time = mpeg4_dec->b_frame->vop_time - mpeg4_dec->f_frame->vop_time;
    trb_time = mpeg4_frame->vop_time - mpeg4_dec->f_frame->vop_time;

    info.trd[0] = trd_time;
    info.trb[0] = trb_time;

    info.trd[1] = round ((double) trd_time / (double) mpeg4_dec->tframe);
    info.trb[1] = round ((double) trb_time / (double) mpeg4_dec->tframe);

    /* backward reference */
    info.backward_reference =
        GST_VDP_VIDEO_BUFFER (GST_VIDEO_FRAME (mpeg4_dec->
            b_frame)->src_buffer)->surface;
  }

  memcpy (info.intra_quantizer_matrix, vol->intra_quant_mat, 64);
  memcpy (info.non_intra_quantizer_matrix, vol->non_intra_quant_mat, 64);

  info.vop_time_increment_resolution = vol->vop_time_increment_resolution;
  info.resync_marker_disable = vol->resync_marker_disable;
  info.interlaced = vol->interlaced;
  info.quant_type = vol->quant_type;
  info.quarter_sample = vol->quarter_sample;
  /* FIXME: support short video header */
  info.short_video_header = FALSE;

  info.vop_coding_type = vop->coding_type;
  info.vop_fcode_forward = vop->fcode_forward;
  info.vop_fcode_backward = vop->fcode_backward;
  info.rounding_control = vop->rounding_type;
  info.alternate_vertical_scan_flag = vop->alternate_vertical_scan_flag;
  info.top_field_first = vop->top_field_first;

  return info;
}
Example #2
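Allocates an output buffer from the decoder's source pad and, on first use, creates the VDPAU decoder for the negotiated profile and frame size.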
static GstFlowReturn
gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, GstBuffer ** outbuf)
{
    GstFlowReturn ret;

    ret = gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
            GST_PAD_CAPS (mpeg_dec->src), outbuf);
    if (ret != GST_FLOW_OK)
        return ret;

    if (!mpeg_dec->device) {
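        /* The first output buffer tells us which GstVdpDevice we are
         * decoding on; create the VDPAU decoder once, on first use. */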
        GstVdpDevice *device;
        VdpStatus status;

        device = mpeg_dec->device =
                     g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);

        status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
                                             mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
        if (status != VDP_STATUS_OK) {
            GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
                               ("Could not create vdpau decoder"),
                               ("Error returned from vdpau was: %s",
                                device->vdp_get_error_string (status)));
            ret = GST_FLOW_ERROR;
        }
    }

    return ret;
}
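Example #3
A chain function of the VDPAU video post-processor: it drops buffers that QoS reports as late, uploads raw YUV input into a VdpVideoSurface when the input is not native, creates the mixer on demand and drains the queued pictures downstream.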
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstClockTime qostime;
  GstFlowReturn ret = GST_FLOW_OK;
  GError *err;

  GST_DEBUG ("chain");

  /* can only do QoS if the segment is in TIME */
  if (vpp->segment.format != GST_FORMAT_TIME)
    goto no_qos;

  /* QOS is done on the running time of the buffer, get it now */
  qostime = gst_segment_to_running_time (&vpp->segment, GST_FORMAT_TIME,
      GST_BUFFER_TIMESTAMP (buffer));

  if (qostime != -1) {
    gboolean need_skip;
    GstClockTime earliest_time;

    /* lock for getting the QoS parameters that are set (in a different thread)
     * with the QOS events */
    GST_OBJECT_LOCK (vpp);
    earliest_time = vpp->earliest_time;
    /* check for QoS, don't perform conversion for buffers
     * that are known to be late. */
    need_skip = GST_CLOCK_TIME_IS_VALID (earliest_time) &&
        qostime <= earliest_time;

    GST_OBJECT_UNLOCK (vpp);

    if (need_skip) {
      GST_DEBUG_OBJECT (vpp, "skipping transform: qostime %"
          GST_TIME_FORMAT " <= %" GST_TIME_FORMAT,
          GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
      /* mark discont for next buffer */
      vpp->discont = TRUE;
      gst_buffer_unref (buffer);
      return GST_FLOW_OK;
    }
  }

no_qos:

  if (vpp->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    vpp->discont = FALSE;
  }

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  if (!vpp->native_input) {
    GstVdpVideoBuffer *video_buf;

    err = NULL;
    video_buf =
        (GstVdpVideoBuffer *) gst_vdp_buffer_pool_get_buffer (vpp->vpool, &err);
    if (G_UNLIKELY (!video_buf))
      goto video_buf_error;

    if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
            vpp->width, vpp->height)) {
      gst_buffer_unref (GST_BUFFER (video_buf));
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Couldn't upload YUV data to vdpau"), (NULL));
      ret = GST_FLOW_ERROR;
      goto error;
    }

    gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
        GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

    gst_buffer_unref (buffer);
    buffer = GST_BUFFER (video_buf);
  }

  if (G_UNLIKELY (vpp->mixer == VDP_INVALID_HANDLE)) {
    ret = gst_vdp_vpp_create_mixer (vpp);
    if (ret != GST_FLOW_OK)
      goto error;
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  ret = gst_vdp_vpp_drain (vpp);

done:
  gst_object_unref (vpp);

  return ret;

error:
  gst_buffer_unref (buffer);
  goto done;

video_buf_error:
  gst_buffer_unref (GST_BUFFER (buffer));
  gst_vdp_vpp_post_error (vpp, err);
  ret = GST_FLOW_ERROR;
  goto done;
}
Example #4
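The handle_frame implementation of the VDPAU MPEG decoder built on GstBaseVideoDecoder: it applies the parsed sequence, picture, GOP and quantization-matrix headers, skips frames whose reference pictures are missing, renders the slice data through VDPAU and rotates the forward/backward reference frames.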
static GstFlowReturn
gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (base_video_decoder);

  VdpPictureInfoMPEG1Or2 *info;
  GstVdpMpegFrame *mpeg_frame;

  GstFlowReturn ret = GST_FLOW_OK;
  VdpBitstreamBuffer vbit[1];
  GstVdpVideoBuffer *outbuf;

  /* MPEG_PACKET_SEQUENCE */
  mpeg_frame = GST_VDP_MPEG_FRAME (frame);
  if (mpeg_frame->seq) {
    ret = gst_vdp_mpeg_dec_handle_sequence (mpeg_dec, mpeg_frame->seq,
        mpeg_frame->seq_ext);
    if (ret != GST_FLOW_OK) {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return ret;
    }
  }

  if (mpeg_dec->state == GST_VDP_MPEG_DEC_STATE_NEED_SEQUENCE) {
    GST_DEBUG_OBJECT (mpeg_dec, "Drop frame since we haven't found a "
        "MPEG_PACKET_SEQUENCE yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }

  /* MPEG_PACKET_PICTURE */
  if (mpeg_frame->pic)
    gst_vdp_mpeg_dec_handle_picture (mpeg_dec, mpeg_frame->pic);

  /* MPEG_PACKET_EXT_PICTURE_CODING */
  if (mpeg_frame->pic_ext)
    gst_vdp_mpeg_dec_handle_picture_coding (mpeg_dec, mpeg_frame->pic_ext,
        frame);

  /* MPEG_PACKET_GOP */
  if (mpeg_frame->gop)
    gst_vdp_mpeg_dec_handle_gop (mpeg_dec, mpeg_frame->gop);

  /* MPEG_PACKET_EXT_QUANT_MATRIX */
  if (mpeg_frame->qm_ext)
    gst_vdp_mpeg_dec_handle_quant_matrix (mpeg_dec, mpeg_frame->qm_ext);


  info = &mpeg_dec->vdp_info;

  info->slice_count = mpeg_frame->n_slices;

  /* check if we can decode the frame */
  if (info->picture_coding_type != I_FRAME
      && info->backward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got an I_FRAME yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }
  if (info->picture_coding_type == B_FRAME
      && info->forward_reference == VDP_INVALID_HANDLE) {
    GST_DEBUG_OBJECT (mpeg_dec,
        "Drop frame since we haven't got two non B_FRAMES yet");

    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }


  if (info->picture_coding_type != B_FRAME) {
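    /* This frame becomes a new reference: output the previous backward
     * reference and promote it to the forward reference. */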
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      ret = gst_base_video_decoder_finish_frame (base_video_decoder,
          mpeg_dec->b_frame);
    }

    if (info->forward_reference != VDP_INVALID_HANDLE) {
      gst_video_frame_unref (mpeg_dec->f_frame);
      info->forward_reference = VDP_INVALID_HANDLE;
    }

    info->forward_reference = info->backward_reference;
    mpeg_dec->f_frame = mpeg_dec->b_frame;

    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

  /* decode */
  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (mpeg_frame->slices);
  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (mpeg_frame->slices);

  ret = gst_vdp_decoder_render (GST_VDP_DECODER (mpeg_dec),
      (VdpPictureInfo *) info, 1, vbit, &outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  frame->src_buffer = GST_BUFFER_CAST (outbuf);

  if (info->picture_coding_type == B_FRAME) {
    ret = gst_base_video_decoder_finish_frame (base_video_decoder, frame);
  } else {
    info->backward_reference = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
    mpeg_dec->b_frame = gst_video_frame_ref (frame);
  }

  return ret;
}
Example #5
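Another version of gst_vdp_vpp_chain: it pulls post-processed pictures one at a time, computes source and destination rectangles (honouring the pixel aspect ratio and the force-aspect-ratio setting), runs vdp_video_mixer_render and pushes the result downstream.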
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));

  GstFlowReturn ret = GST_FLOW_OK;

  GstVdpPicture current_pic;

  guint32 video_surfaces_past_count;
  VdpVideoSurface video_surfaces_past[MAX_PICTURES];

  guint32 video_surfaces_future_count;
  VdpVideoSurface video_surfaces_future[MAX_PICTURES];

  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (vpp, "Received discont buffer");
    gst_vdp_vpp_flush (vpp);
  }

  gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));

  while (gst_vdp_vpp_get_next_picture (vpp,
          &current_pic,
          &video_surfaces_past_count, video_surfaces_past,
          &video_surfaces_future_count, video_surfaces_future)) {
    GstVdpOutputBuffer *outbuf;

    GstStructure *structure;
    GstVideoRectangle src_r = { 0, };
    GstVideoRectangle dest_r = { 0, };
    gint par_n, par_d;
    VdpRect rect;

    GstVdpDevice *device;
    VdpStatus status;

    ret =
        gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
        &outbuf);
    if (ret != GST_FLOW_OK)
      break;

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
    if (!gst_structure_get_int (structure, "width", &src_r.w) ||
        !gst_structure_get_int (structure, "height", &src_r.h))
      goto invalid_caps;

    if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
            &par_d)) {
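      /* Scale the source width by the pixel aspect ratio and re-centre it,
       * so non-square input pixels map correctly onto the output. */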
      gint new_width;

      new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d);
      src_r.x += (src_r.w - new_width) / 2;
      src_r.w = new_width;
    }

    structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
    if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
        !gst_structure_get_int (structure, "height", &dest_r.h))
      goto invalid_caps;

    if (vpp->force_aspect_ratio) {
      GstVideoRectangle res_r;

      gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
      rect.x0 = res_r.x;
      rect.x1 = res_r.w + res_r.x;
      rect.y0 = res_r.y;
      rect.y1 = res_r.h + res_r.y;
    } else {
      rect.x0 = 0;
      rect.x1 = dest_r.w;
      rect.y0 = 0;
      rect.y1 = dest_r.h;
    }

    device = vpp->device;
    status =
        device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
        current_pic.structure, video_surfaces_past_count, video_surfaces_past,
        current_pic.buf->surface, video_surfaces_future_count,
        video_surfaces_future, NULL, outbuf->surface, NULL, &rect, 0, NULL);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
          ("Could not post process frame"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      ret = GST_FLOW_ERROR;
      goto done;
    }

    GST_BUFFER_TIMESTAMP (outbuf) = current_pic.timestamp;
    if (gst_vdp_vpp_is_interlaced (vpp))
      GST_BUFFER_DURATION (outbuf) = vpp->field_duration;
    else
      GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (current_pic.buf);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_DISCONT))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_PREROLL))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_PREROLL);

    if (GST_BUFFER_FLAG_IS_SET (current_pic.buf, GST_BUFFER_FLAG_GAP))
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);

    ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
    if (ret != GST_FLOW_OK)
      break;

    continue;

  invalid_caps:
    gst_buffer_unref (GST_BUFFER (outbuf));
    ret = GST_FLOW_ERROR;
    break;
  }

done:
  gst_object_unref (vpp);

  return ret;
}
Example #6
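An adapter-based decode path of the VDPAU MPEG decoder: it rotates the reference buffers, allocates an output buffer, feeds the accumulated slice data to vdp_decoder_render and either pushes the decoded frame (for B frames) or keeps it as the new backward reference.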
static GstFlowReturn
gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
                         GstClockTime timestamp, gint64 size)
{
    VdpPictureInfoMPEG1Or2 *info;
    GstBuffer *buffer;
    GstBuffer *outbuf;
    VdpVideoSurface surface;
    GstVdpDevice *device;
    VdpBitstreamBuffer vbit[1];
    VdpStatus status;

    info = &mpeg_dec->vdp_info;

    if (info->picture_coding_type != B_FRAME) {
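        /* A new reference frame: push out the previous backward reference
         * and promote it to the forward reference. */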
        if (info->backward_reference != VDP_INVALID_HANDLE) {
            gst_buffer_ref (mpeg_dec->b_buffer);
            gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
                                                GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
        }

        if (info->forward_reference != VDP_INVALID_HANDLE) {
            gst_buffer_unref (mpeg_dec->f_buffer);
            info->forward_reference = VDP_INVALID_HANDLE;
        }

        info->forward_reference = info->backward_reference;
        mpeg_dec->f_buffer = mpeg_dec->b_buffer;

        info->backward_reference = VDP_INVALID_HANDLE;
    }

    if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
        gst_adapter_clear (mpeg_dec->adapter);
        return GST_FLOW_ERROR;
    }

    device = GST_VDP_VIDEO_BUFFER (outbuf)->device;

    if (info->forward_reference != VDP_INVALID_HANDLE &&
            info->picture_coding_type != I_FRAME)
        gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
                                            GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));

    if (info->backward_reference != VDP_INVALID_HANDLE
            && info->picture_coding_type == B_FRAME)
        gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
                                            GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));

    GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
    GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
    GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
    GST_BUFFER_SIZE (outbuf) = size;

    if (info->picture_coding_type == I_FRAME)
        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
    else
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

    if (info->top_field_first)
        GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
    else
        GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

    buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
                                      gst_adapter_available (mpeg_dec->adapter));

    surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

    vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
    vbit[0].bitstream = GST_BUFFER_DATA (buffer);
    vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);

    status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
                                         (VdpPictureInfo *) info, 1, vbit);
    gst_buffer_unref (buffer);
    info->slice_count = 0;

    if (status != VDP_STATUS_OK) {
        GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
                           ("Could not decode"),
                           ("Error returned from vdpau was: %s",
                            device->vdp_get_error_string (status)));

        gst_buffer_unref (GST_BUFFER (outbuf));

        return GST_FLOW_ERROR;
    }

    if (info->picture_coding_type == B_FRAME) {
        gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
                                            GST_VDP_VIDEO_BUFFER (outbuf));
    } else {
        info->backward_reference = surface;
        mpeg_dec->b_buffer = GST_BUFFER (outbuf);
    }

    return GST_FLOW_OK;
}
Example #7
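A GstBaseTransform transform function that downloads a VDPAU video surface into a raw YUV output buffer, covering the YV12, I420, NV12, UYVY and YUY2 fourccs.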
GstFlowReturn
gst_vdp_video_yuv_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);
  GstVdpDevice *device;
  VdpVideoSurface surface;

  device = GST_VDP_VIDEO_BUFFER (inbuf)->device;
  surface = GST_VDP_VIDEO_BUFFER (inbuf)->surface;

  switch (video_yuv->format) {
    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
          1, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
    {
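      /* I420 is YV12 with the chroma planes swapped: request
       * VDP_YCBCR_FORMAT_YV12 and point the plane array at the I420
       * plane offsets in Y, V, U order. */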
      VdpStatus status;
      guint8 *data[3];
      guint32 stride[3];

      data[0] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width, video_yuv->height);
      data[1] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width, video_yuv->height);
      data[2] = GST_BUFFER_DATA (outbuf) +
          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width, video_yuv->height);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          0, video_yuv->width);
      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          2, video_yuv->width);
      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
          1, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
    {
      VdpStatus status;
      guint8 *data[2];
      guint32 stride[2];

      data[0] = GST_BUFFER_DATA (outbuf);
      data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;

      stride[0] = video_yuv->width;
      stride[1] = video_yuv->width;

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_UYVY, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
    {
      VdpStatus status;
      guint8 *data[1];
      guint32 stride[1];

      data[0] = GST_BUFFER_DATA (outbuf);

      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
          0, video_yuv->width);

      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
      status =
          device->vdp_video_surface_get_bits_ycbcr (surface,
          VDP_YCBCR_FORMAT_YUYV, (void *) data, stride);
      GST_LOG_OBJECT (video_yuv,
          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
            ("Couldn't get data from vdpau"),
            ("Error returned from vdpau was: %s",
                device->vdp_get_error_string (status)));
        return GST_FLOW_ERROR;
      }
      break;
    }
    default:
      break;
  }

  gst_buffer_copy_metadata (outbuf, inbuf,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);

  GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));

  return GST_FLOW_OK;
}