static GstVaapiDecoderStatus
decode_slice (GstVaapiDecoderVp8 * decoder, GstVaapiPicture * picture,
    const guchar * buf, guint buf_size)
{
  GstVaapiSlice *slice;

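  /* Wrap the compressed frame data into a VA slice buffer */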
  slice = GST_VAAPI_SLICE_NEW (VP8, decoder, buf, buf_size);
  if (!slice) {
    GST_ERROR ("failed to allocate slice");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }

  if (!fill_slice (decoder, slice)) {
    gst_vaapi_mini_object_unref (GST_VAAPI_MINI_OBJECT (slice));
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
  }

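  /* Hand the slice over to the picture; it is submitted when the picture is decoded */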
  gst_vaapi_picture_add_slice (GST_VAAPI_PICTURE_CAST (picture), slice);
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_scan (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  GstVaapiPicture *const picture = priv->current_picture;
  GstVaapiSlice *slice;
  VASliceParameterBufferJPEGBaseline *slice_param;
  GstJpegScanHdr scan_hdr;
  guint scan_hdr_size, scan_data_size;
  guint i, h_max, v_max, mcu_width, mcu_height;

  if (!VALID_STATE (decoder, GOT_SOF))
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

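  /* The first two bytes of the SOS payload hold the scan header length (Ls) */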
  scan_hdr_size = (seg->data[seg->offset] << 8) | seg->data[seg->offset + 1];
  scan_data_size = seg->size - scan_hdr_size;

  memset (&scan_hdr, 0, sizeof (scan_hdr));
  if (!gst_jpeg_segment_parse_scan_header (seg, &scan_hdr)) {
    GST_ERROR ("failed to parse scan header");
    return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
  }

  slice = GST_VAAPI_SLICE_NEW (JPEGBaseline, decoder,
      seg->data + seg->offset + scan_hdr_size, scan_data_size);
  if (!slice) {
    GST_ERROR ("failed to allocate slice");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  gst_vaapi_picture_add_slice (picture, slice);

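  /* No DHT segment seen yet: fall back to the default Huffman tables */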
  if (!VALID_STATE (decoder, GOT_HUF_TABLE))
    gst_jpeg_get_default_huffman_tables (&priv->huf_tables);

  // Update VA Huffman table if it changed for this scan
  if (huffman_tables_updated (&priv->huf_tables)) {
    slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW (JPEGBaseline, decoder);
    if (!slice->huf_table) {
      GST_ERROR ("failed to allocate Huffman tables");
      huffman_tables_reset (&priv->huf_tables);
      return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    fill_huffman_table (slice->huf_table, &priv->huf_tables);
    huffman_tables_reset (&priv->huf_tables);
  }

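  /* Fill in the VASliceParameterBufferJPEGBaseline for this scan */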
  slice_param = slice->param;
  slice_param->num_components = scan_hdr.num_components;
  for (i = 0; i < scan_hdr.num_components; i++) {
    slice_param->components[i].component_selector =
        scan_hdr.components[i].component_selector;
    slice_param->components[i].dc_table_selector =
        scan_hdr.components[i].dc_selector;
    slice_param->components[i].ac_table_selector =
        scan_hdr.components[i].ac_selector;
  }
  slice_param->restart_interval = priv->mcu_restart;
  slice_param->slice_horizontal_position = 0;
  slice_param->slice_vertical_position = 0;

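  /* An MCU spans 8x8 data units scaled by the maximum sampling factors */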
  get_max_sampling_factors (&priv->frame_hdr, &h_max, &v_max);
  mcu_width = 8 * h_max;
  mcu_height = 8 * v_max;

  if (scan_hdr.num_components == 1) {   // Non-interleaved
    const guint Csj = slice_param->components[0].component_selector;
    const GstJpegFrameComponent *const fcp =
        get_component (&priv->frame_hdr, Csj);

    if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
      GST_ERROR ("failed to validate image component %u", Csj);
      return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
    }
    mcu_width /= fcp->horizontal_factor;
    mcu_height /= fcp->vertical_factor;
  }
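  /* Total MCU count, rounding partial MCUs at the right and bottom edges up */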
  slice_param->num_mcus =
      ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
      ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);

  priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
  return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
    GstVC1AdvancedSeqHdr *advanced = &seq_hdr->advanced;
    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVC1ParserResult result;
    GstVaapiPicture *picture;
    GstVaapiSlice *slice;
    GstVaapiDecoderStatus status;
    VASliceParameterBufferVC1 *slice_param;
    GstClockTime pts;
    gint32 poc;

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_DEBUG("failed to reset context");
        return status;
    }

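    /* Finish decoding any pending picture before starting a new frame */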
    if (priv->current_picture) {
        if (!decode_current_picture(decoder))
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    picture = GST_VAAPI_PICTURE_NEW(VC1, decoder);
    if (!picture) {
        GST_DEBUG("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    if (!gst_vc1_bitplanes_ensure_size(priv->bitplanes, seq_hdr)) {
        GST_DEBUG("failed to allocate bitplanes");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    result = gst_vc1_parse_frame_header(
        rbdu->data + rbdu->offset,
        rbdu->size,
        frame_hdr,
        seq_hdr,
        priv->bitplanes
    );
    if (result != GST_VC1_PARSER_OK) {
        GST_DEBUG("failed to parse frame layer");
        return get_status(result);
    }

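    /* Map the VC-1 picture type to its VA-API counterpart; I, P and skipped
       frames are kept as reference pictures */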
    switch (frame_hdr->ptype) {
    case GST_VC1_PICTURE_TYPE_I:
        picture->type   = GST_VAAPI_PICTURE_TYPE_I;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_SKIPPED:
    case GST_VC1_PICTURE_TYPE_P:
        picture->type   = GST_VAAPI_PICTURE_TYPE_P;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_B:
        picture->type   = GST_VAAPI_PICTURE_TYPE_B;
        priv->successive_bfrm_cnt += 1;
        break;
    case GST_VC1_PICTURE_TYPE_BI:
        picture->type   = GST_VAAPI_PICTURE_TYPE_BI;
        priv->successive_bfrm_cnt += 1;
        break;
    default:
        GST_DEBUG("unsupported picture type %d", frame_hdr->ptype);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    /* Since the DPB only stores reference pictures, an always-increasing POC
       works fine here, but the PTS of each frame must be computed explicitly. */
    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
        picture->poc = priv->frm_cnt - 1;
    else
        picture->poc = priv->frm_cnt;

    gst_vaapi_decoder_get_framerate(GST_VAAPI_DECODER_CAST(decoder),
        &priv->fps_n, &priv->fps_d);
    pts = gst_util_uint64_scale(picture->poc,
                                GST_SECOND * priv->fps_d, priv->fps_n);

    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        if (priv->successive_bfrm_cnt) {
            poc = priv->frm_cnt - priv->successive_bfrm_cnt - 1;
            pts = gst_util_uint64_scale(picture->poc,
                                        GST_SECOND * priv->fps_d, priv->fps_n);
            gst_vaapi_dpb_reset_pts(priv->dpb, poc, pts);
            priv->successive_bfrm_cnt = 0;
        }
    }

    picture->pts = pts;
    priv->frm_cnt++;

    if (!fill_picture(decoder, picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    slice = GST_VAAPI_SLICE_NEW(
        VC1,
        decoder,
        ebdu->data + ebdu->sc_offset,
        ebdu->size + ebdu->offset - ebdu->sc_offset
    );
    if (!slice) {
        GST_DEBUG("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    /* Fill in VASliceParameterBufferVC1 */
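    /* macroblock_offset is in bits from the start of the slice data: the
       start-code bytes before the BDU payload plus the frame header size */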
    slice_param                            = slice->param;
    slice_param->macroblock_offset         = 8 * (ebdu->offset - ebdu->sc_offset) + frame_hdr->header_size;
    slice_param->slice_vertical_position   = 0;

    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_scan(
    GstVaapiDecoderJpeg *decoder,
    guchar              *scan_header,
    guint                scan_header_size,
    guchar              *scan_data,
    guint                scan_data_size)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture *picture = priv->current_picture;
    VASliceParameterBufferJPEGBaseline *slice_param;
    GstVaapiSlice *gst_slice;
    guint total_h_samples, total_v_samples;
    GstJpegScanHdr  scan_hdr;
    guint i;

    if (!picture) {
        GST_ERROR("There is no VAPicture before decoding scan.");
        return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;
    }

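    /* Fill in the quantization and Huffman table buffers for this picture */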
    if (!fill_quantization_table(decoder, picture)) {
        GST_ERROR("failed to fill in quantization table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!fill_huffman_table(decoder, picture)) {
        GST_ERROR("failed to fill in huffman table");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    memset(&scan_hdr, 0, sizeof(scan_hdr));
    if (!gst_jpeg_parse_scan_hdr(&scan_hdr, scan_header, scan_header_size, 0)) {
        GST_DEBUG("Jpeg parsed scan failed.");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    gst_slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder, scan_data, scan_data_size);
    if (!gst_slice) {
        GST_ERROR("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, gst_slice);

    slice_param = gst_slice->param;
    slice_param->num_components = scan_hdr.num_components;
    for (i = 0; i < scan_hdr.num_components; i++) {
        slice_param->components[i].component_selector =
            scan_hdr.components[i].component_selector;
        slice_param->components[i].dc_table_selector =
            scan_hdr.components[i].dc_selector;
        slice_param->components[i].ac_table_selector =
            scan_hdr.components[i].ac_selector;
    }
    slice_param->restart_interval = priv->mcu_restart;
    if (scan_hdr.num_components == 1) { /* non-interleaved */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        /* Y (luma) MCU count */
        if (slice_param->components[0].component_selector ==
            priv->frame_hdr.components[0].identifier) {
            slice_param->num_mcus =
                (priv->frame_hdr.width / 8) * (priv->frame_hdr.height / 8);
        } else { /* Cb/Cr (chroma) MCU count, assuming 4:2:0 subsampling */
            slice_param->num_mcus =
                (priv->frame_hdr.width / 16) * (priv->frame_hdr.height / 16);
        }
    } else { /* interleaved */
        slice_param->slice_horizontal_position = 0;
        slice_param->slice_vertical_position = 0;
        total_v_samples = get_max_vertical_samples(&priv->frame_hdr);
        total_h_samples = get_max_horizontal_samples(&priv->frame_hdr);
        slice_param->num_mcus =
            ((priv->frame_hdr.width + total_h_samples * 8 - 1) / (total_h_samples * 8)) *
            ((priv->frame_hdr.height + total_v_samples * 8 - 1) / (total_v_samples * 8));
    }

    if (picture->slices && picture->slices->len)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
}