static GstFlowReturn
gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
    GstVideoFrame * frame, GstClockTimeDiff deadline)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (base_video_decoder);

  GstH264Frame *h264_frame;
  GstH264Slice *slice;
  GstH264Picture *pic G_GNUC_UNUSED;
  GstH264Sequence *seq G_GNUC_UNUSED;

  GstFlowReturn ret;
  GstVdpVideoBuffer *outbuf;
  VdpPictureInfoH264 info;
  VdpBitstreamBuffer *bufs;
  guint n_bufs;

  GST_DEBUG ("handle_frame");

  h264_frame = GST_H264_FRAME_CAST (frame);

  slice = &h264_frame->slice_hdr;
  pic = slice->picture;
  seq = pic->sequence;


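  /* an IDR picture starts a new coded video sequence; reset the decoder state */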
  if (slice->nal_unit.IdrPicFlag) {
    ret = gst_vdp_h264_dec_idr (h264_dec, h264_frame);

    if (ret == GST_FLOW_OK)
      h264_dec->got_idr = TRUE;
    else {
      gst_base_video_decoder_skip_frame (base_video_decoder, frame);
      return GST_FLOW_OK;
    }
  }

  /* don't decode anything until we've received an IDR frame */
  if (!h264_dec->got_idr) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return GST_FLOW_OK;
  }


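  /* fill the VDPAU picture info and wrap the frame's slice data in bitstream buffers */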
  gst_vdp_h264_dec_init_frame_info (h264_dec, h264_frame);

  info = gst_vdp_h264_dec_fill_info (h264_dec, h264_frame);
  bufs = gst_vdp_h264_dec_create_bitstream_buffers (h264_dec, h264_frame,
      &n_bufs);

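  /* hand the picture info and bitstream data to VDPAU for decoding */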
  ret = gst_vdp_decoder_render (GST_VDP_DECODER (h264_dec),
      (VdpPictureInfo *) & info, n_bufs, bufs, &outbuf);
  g_free (bufs);

  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (base_video_decoder, frame);
    return ret;
  }

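  /* use the decoded video buffer as the frame's output buffer */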
  frame->src_buffer = GST_BUFFER_CAST (outbuf);


  /* decoded reference picture marking (DPB handling) */
  if (slice->nal_unit.ref_idc != 0 && !slice->nal_unit.IdrPicFlag) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

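        /* apply the memory management control operations (MMCO) from the slice header */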
        switch (marking[i].memory_management_control_operation) {
          case 1:
          {
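            /* MMCO 1: mark a short-term reference picture as unused for reference */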
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

          case 2:
          {
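            /* MMCO 2: mark a long-term reference picture as unused for reference */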
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

          case 3:
          {
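            /* MMCO 3: assign a long-term frame index to a short-term picture */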
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

          case 4:
          {
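            /* MMCO 4: set the maximum long-term frame index */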
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

          case 5:
          {
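            /* MMCO 5: mark all reference pictures as unused and disable long-term references */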
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            break;
        }
      }
    } else
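      /* no adaptive marking: use the sliding-window reference picture marking process */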
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

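  /* store the decoded frame in the DPB */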
  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}
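
/* Decoded reference picture marking: apply the slice's memory management
 * control operations (or the sliding-window process) to the DPB, then add
 * the frame to it. */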
static GstFlowReturn
gst_vdp_h264_dec_handle_dpb (GstVdpH264Dec * h264_dec,
    GstH264Frame * h264_frame, GstH264SliceHdr * slice)
{
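  /* only non-IDR reference pictures need explicit reference picture marking */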
  if (slice->nalu_ref_idc != 0 && slice->slice_type != GST_H264_NAL_SLICE_IDR) {
    if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
      GstH264RefPicMarking *marking;
      guint i;

      marking = slice->dec_ref_pic_marking.ref_pic_marking;
      for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) {

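        /* apply the memory management control operations (MMCO) from the slice header */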
        switch (marking[i].memory_management_control_operation) {
          case 1:
          {
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_short_term_unused (h264_dec->dpb, pic_num);
            break;
          }

          case 2:
          {
            gst_h264_dpb_mark_long_term_unused (h264_dec->dpb,
                marking[i].long_term_pic_num);
            break;
          }

          case 3:
          {
            guint16 pic_num;

            pic_num = slice->frame_num -
                (marking[i].difference_of_pic_nums_minus1 + 1);
            gst_h264_dpb_mark_long_term (h264_dec->dpb, pic_num,
                marking[i].long_term_frame_idx);
            break;
          }

          case 4:
          {
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx",
                marking[i].max_long_term_frame_idx_plus1 - 1, NULL);
            break;
          }

          case 5:
          {
            gst_h264_dpb_mark_all_unused (h264_dec->dpb);
            g_object_set (h264_dec->dpb, "max-longterm-frame-idx", -1, NULL);
            break;
          }

          default:
            break;
        }
      }
    } else
      gst_h264_dpb_mark_sliding (h264_dec->dpb);
  }

  return gst_h264_dpb_add (h264_dec->dpb, h264_frame);
}