static GstVaapiDecoderStatus
gst_vaapi_decoder_vp8_end_frame (GstVaapiDecoder * base_decoder)
{
  GstVaapiDecoderVp8 *const decoder = GST_VAAPI_DECODER_VP8_CAST (base_decoder);

  return decode_current_picture (decoder);
}
static GstVaapiDecoderStatus
gst_vaapi_decoder_jpeg_end_frame (GstVaapiDecoder * base_decoder)
{
  GstVaapiDecoderJpeg *const decoder =
      GST_VAAPI_DECODER_JPEG_CAST (base_decoder);

  return decode_current_picture (decoder);
}
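Both end_frame() wrappers above simply delegate to the codec-specific decode_current_picture(). For context, here is a minimal sketch of what such a helper usually does in these decoders, assuming the gst_vaapi_picture_decode()/gst_vaapi_picture_output() helpers from gstreamer-vaapi; the exact per-codec implementations differ.

/* Sketch only, not the upstream implementation: submit the accumulated
 * picture and release it once it has been queued for output. */
static GstVaapiDecoderStatus
decode_current_picture (GstVaapiDecoderJpeg *decoder)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->current_picture;

    if (!picture)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    /* Submit the accumulated picture/slice parameters to the VA driver. */
    if (!gst_vaapi_picture_decode(picture))
        goto error;

    /* Queue the decoded surface for output in presentation order. */
    if (!gst_vaapi_picture_output(picture))
        goto error;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

error:
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}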
Example #3
static GstVaapiDecoderStatus
decode_picture(
    GstVaapiDecoderJpeg *decoder, 
    guint8               profile,
    guchar              *buf,
    guint                buf_size,
    GstClockTime         pts
)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;

    switch (profile) {
    case GST_JPEG_MARKER_SOF_MIN:
        priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
        break;
    default:
        GST_ERROR("unsupported profile %d", profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf, buf_size, 0)) {
        GST_ERROR("failed to parse image");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }
    priv->height = frame_hdr->height;
    priv->width  = frame_hdr->width;

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset context");
        return status;
    }

    if (priv->current_picture && !decode_current_picture(decoder))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
    if (!picture) {
        GST_ERROR("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    if (!fill_picture(decoder, picture, frame_hdr))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    /* Update presentation time */
    picture->pts = pts;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
Example #4
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderVC1 *decoder)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;

    if (priv->current_picture && !decode_current_picture(decoder))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    gst_vaapi_dpb_flush(priv->dpb);
    return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
}
Example #5
static GstVaapiDecoderStatus
decode_frame(GstVaapiDecoderVC1 *decoder, GstVC1BDU *rbdu, GstVC1BDU *ebdu)
{
    GstVaapiDecoderVC1Private * const priv = decoder->priv;
    GstVC1SeqHdr * const seq_hdr = &priv->seq_hdr;
    GstVC1AdvancedSeqHdr *advanced = &seq_hdr->advanced;
    GstVC1FrameHdr * const frame_hdr = &priv->frame_hdr;
    GstVC1ParserResult result;
    GstVaapiPicture *picture;
    GstVaapiSlice *slice;
    GstVaapiDecoderStatus status;
    VASliceParameterBufferVC1 *slice_param;
    GstClockTime pts;
    gint32 poc;

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_DEBUG("failed to reset context");
        return status;
    }

    if (priv->current_picture && !decode_current_picture(decoder))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    picture = GST_VAAPI_PICTURE_NEW(VC1, decoder);
    if (!picture) {
        GST_DEBUG("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    if (!gst_vc1_bitplanes_ensure_size(priv->bitplanes, seq_hdr)) {
        GST_DEBUG("failed to allocate bitplanes");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    memset(frame_hdr, 0, sizeof(*frame_hdr));
    result = gst_vc1_parse_frame_header(
        rbdu->data + rbdu->offset,
        rbdu->size,
        frame_hdr,
        seq_hdr,
        priv->bitplanes
    );
    if (result != GST_VC1_PARSER_OK) {
        GST_DEBUG("failed to parse frame layer");
        return get_status(result);
    }

    switch (frame_hdr->ptype) {
    case GST_VC1_PICTURE_TYPE_I:
        picture->type   = GST_VAAPI_PICTURE_TYPE_I;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_SKIPPED:
    case GST_VC1_PICTURE_TYPE_P:
        picture->type   = GST_VAAPI_PICTURE_TYPE_P;
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_VC1_PICTURE_TYPE_B:
        picture->type   = GST_VAAPI_PICTURE_TYPE_B;
        priv->successive_bfrm_cnt += 1;
        break;
    case GST_VC1_PICTURE_TYPE_BI:
        picture->type   = GST_VAAPI_PICTURE_TYPE_BI;
        priv->successive_bfrm_cnt += 1;
        break;
    default:
        GST_DEBUG("unsupported picture type %d", frame_hdr->ptype);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    /* Since the DPB only stores reference pictures, an always-increasing POC
       works fine here. But we should explicitly calculate the PTS of each
       frame. */
    if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
        picture->poc = priv->frm_cnt - 1;
    else
        picture->poc = priv->frm_cnt;

    gst_vaapi_decoder_get_framerate(GST_VAAPI_DECODER_CAST(decoder), &priv->fps_n, &priv->fps_d);
    pts = gst_util_uint64_scale(picture->poc,
                                GST_SECOND * priv->fps_d, priv->fps_n);

    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        if (priv->successive_bfrm_cnt) {
            poc = priv->frm_cnt - priv->successive_bfrm_cnt - 1;
            pts = gst_util_uint64_scale(picture->poc,
                                        GST_SECOND * priv->fps_d, priv->fps_n);
            gst_vaapi_dpb_reset_pts(priv->dpb, poc, pts);
            priv->successive_bfrm_cnt = 0;
        }
    }

    picture->pts = pts;
    priv->frm_cnt++;

    if (!fill_picture(decoder, picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    slice = GST_VAAPI_SLICE_NEW(
        VC1,
        decoder,
        ebdu->data + ebdu->sc_offset,
        ebdu->size + ebdu->offset - ebdu->sc_offset
    );
    if (!slice) {
        GST_DEBUG("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    /* Fill in VASliceParameterBufferVC1 */
    slice_param                            = slice->param;
    slice_param->macroblock_offset         = 8 * (ebdu->offset - ebdu->sc_offset) + frame_hdr->header_size;
    slice_param->slice_vertical_position   = 0;

    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
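The comment in decode_frame() above notes that the PTS has to be derived explicitly from the POC. As a standalone illustration of the scaling it uses, here is a small hedged sketch with hypothetical frame-rate values (not taken from the example):

#include <gst/gst.h>

/* Sketch only: pts = poc * frame_duration = poc * GST_SECOND * fps_d / fps_n */
static GstClockTime
poc_to_pts (gint32 poc, guint fps_n, guint fps_d)
{
    return gst_util_uint64_scale (poc, GST_SECOND * fps_d, fps_n);
}

/* e.g. poc_to_pts(3, 30000, 1001) == 100100000 ns, i.e. ~100.1 ms at 29.97 fps */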
Example #6
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    GstJpegMarkerSegment seg;
    GstJpegScanSegment scan_seg;
    GstClockTime pts;
    guchar *buf;
    guint buf_size, ofs;
    gboolean append_ecs;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    if (!buf || buf_size == 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    memset(&scan_seg, 0, sizeof(scan_seg));

    pts = GST_BUFFER_TIMESTAMP(buffer);
    ofs = 0;
    while (gst_jpeg_parse(&seg, buf, buf_size, ofs)) {
        if (seg.size < 0) {
            GST_DEBUG("buffer to short for parsing");
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs += seg.size;

        /* Decode scan, if complete */
        if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
            scan_seg.data_size = seg.offset - scan_seg.data_offset;
            scan_seg.is_valid  = TRUE;
        }
        if (scan_seg.is_valid) {
            status = decode_scan(
                decoder,
                buf + scan_seg.header_offset,
                scan_seg.header_size,
                buf + scan_seg.data_offset,
                scan_seg.data_size
            );
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                break;
            memset(&scan_seg, 0, sizeof(scan_seg));
        }

        append_ecs = TRUE;
        switch (seg.marker) {
        case GST_JPEG_MARKER_SOI:
            priv->has_quant_table = FALSE;
            priv->has_huf_table   = FALSE;
            priv->mcu_restart     = 0;
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        case GST_JPEG_MARKER_EOI:
            if (decode_current_picture(decoder)) {
                /* Get out of the loop, trailing data is not needed */
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                goto end;
            }
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
            break;
        case GST_JPEG_MARKER_DHT:
            status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DQT:
            status = decode_quant_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DRI:
            status = decode_restart_interval(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DAC:
            GST_ERROR("unsupported arithmetic coding mode");
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
            break;
        case GST_JPEG_MARKER_SOS:
            scan_seg.header_offset = seg.offset;
            scan_seg.header_size   = seg.size;
            scan_seg.data_offset   = seg.offset + seg.size;
            scan_seg.data_size     = 0;
            append_ecs             = FALSE;
            break;
        default:
            /* Restart marker */
            if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
                seg.marker <= GST_JPEG_MARKER_RST_MAX) {
                append_ecs = FALSE;
                break;
            }

            /* Frame header */
            if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
                seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
                status = decode_picture(
                    decoder,
                    seg.marker,
                    buf + seg.offset, seg.size,
                    pts
                );
                break;
            }

            /* Application segments */
            if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
                seg.marker <= GST_JPEG_MARKER_APP_MAX) {
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                break;
            }

            GST_WARNING("unsupported marker (0x%02x)", seg.marker);
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }

        /* Append entropy coded segments */
        if (append_ecs)
            scan_seg.data_size = seg.offset - scan_seg.data_offset;

        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            break;
    }
end:
    return status;
}