Decode_Status VaapiDecoderH264::decodeSlice(H264NalUnit * nalu)
{
    Decode_Status status;
    VaapiPictureH264 *picture;
    VaapiSliceH264 *slice = NULL;
    H264SliceHdr *sliceHdr;
    H264SliceHdr tmpSliceHdr;
    H264ParserResult result;

    /* parse the slice header info */
    memset((void *) &tmpSliceHdr, 0, sizeof(tmpSliceHdr));
    result = h264_parser_parse_slice_hdr(&m_parser, nalu,
                                         &tmpSliceHdr, true, true);
    if (result != H264_PARSER_OK) {
        status = getStatus(result);
        goto error;
    }

    /* check info and reset VA resource if necessary */
    status = ensureContext(tmpSliceHdr.pps);
    if (status != DECODE_SUCCESS)
        return status;

    /* construct the slice and copy in the parsed slice header */
    slice = new VaapiSliceH264(m_VADisplay,
                               m_VAContext,
                               nalu->data + nalu->offset, nalu->size);
    sliceHdr = &(slice->m_sliceHdr);

    memcpy((void *) sliceHdr, (void *) &tmpSliceHdr, sizeof(*sliceHdr));

    if (isNewPicture(nalu, sliceHdr)) {
        status = decodePicture(nalu, sliceHdr);
        if (status != DECODE_SUCCESS)
            goto error;
    }

    if (!fillSlice(slice, nalu)) {
        status = DECODE_FAIL;
        goto error;
    }

    m_currentPicture->addSlice((VaapiSlice *) slice);

    return DECODE_SUCCESS;

  error:
    if (slice)
        delete slice;
    return status;
}
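/* Note: the goto-based cleanup in decodeSlice() above (delete the slice on any
 * error path) can equivalently be written with RAII. A minimal, self-contained
 * sketch with placeholder types -- FakeSlice/FakeStatus/decodeSliceSketch are
 * illustrative names, not part of the real VaapiDecoderH264 API: */
#include <memory>

struct FakeSlice { /* stand-in for VaapiSliceH264 */ };
enum FakeStatus { FAKE_SUCCESS, FAKE_FAIL };

static FakeStatus decodeSliceSketch(bool parseOk, bool fillOk)
{
    if (!parseOk)
        return FAKE_FAIL;                       // nothing allocated yet

    // unique_ptr frees the slice automatically on every early return below.
    std::unique_ptr<FakeSlice> slice(new FakeSlice());

    if (!fillOk)
        return FAKE_FAIL;                       // slice freed here

    // On success the real code transfers the raw pointer to
    // m_currentPicture->addSlice(); the sketch simply lets unique_ptr clean up.
    return FAKE_SUCCESS;
}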
Decode_Status VideoDecoderVP8::decodeFrame(VideoDecodeBuffer* buffer, vbp_data_vp8 *data) {
    Decode_Status status;
    bool useGraphicbuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
    mCurrentPTS = buffer->timeStamp;
    if (0 == data->num_pictures || NULL == data->pic_data) {
        WTRACE("Number of pictures is 0.");
        return DECODE_SUCCESS;
    }

    if (VP8_KEY_FRAME == data->codec_data->frame_type) {
        if (mSizeChanged && !useGraphicbuffer){
            mSizeChanged = false;
            return DECODE_FORMAT_CHANGE;
        } else {
            updateFormatInfo(data);
            bool noNeedFlush = false;
            if (useGraphicbuffer) {
                noNeedFlush = (mVideoFormatInfo.width <= mVideoFormatInfo.surfaceWidth)
                        && (mVideoFormatInfo.height <= mVideoFormatInfo.surfaceHeight);
            }
            if (mSizeChanged == true && !noNeedFlush) {
                flushSurfaceBuffers();
                mSizeChanged = false;
                return DECODE_FORMAT_CHANGE;
            }
        }
    }

    if (data->codec_data->frame_type == VP8_SKIPPED_FRAME) {
        // Do nothing for a skipped frame; the last frame will be rendered again natively.
        return DECODE_SUCCESS;
    }

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");

    // set referenceFrame to true if the decoded frame is a key or inter frame, false otherwise.
    int frameType = data->codec_data->frame_type;
    mAcquiredBuffer->referenceFrame = (frameType == VP8_KEY_FRAME || frameType == VP8_INTER_FRAME);
    // assume it is a frame picture.
    mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    mAcquiredBuffer->renderBuffer.timeStamp = buffer->timeStamp;
    mAcquiredBuffer->renderBuffer.flag = 0;
    if (buffer->flag & WANT_DECODE_ONLY) {
        mAcquiredBuffer->renderBuffer.flag |= WANT_DECODE_ONLY;
    }
    if (mSizeChanged) {
        mSizeChanged = false;
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
    }

    // Here data->num_pictures is always equal to 1
    for (uint32_t index = 0; index < data->num_pictures; index++) {
        status = decodePicture(data, index);
        if (status != DECODE_SUCCESS) {
            endDecodingFrame(true);
            return status;
        }
    }

    if (frameType != VP8_SKIPPED_FRAME) {
        updateReferenceFrames(data);
    }

    // if the sample is successfully decoded, call outputSurfaceBuffer(); otherwise
    // call releaseSurfaceBuffer();
    status = outputSurfaceBuffer();
    return status;
}
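/* The key-frame path above only skips the surface flush when graphic buffers
 * are in use and the new coded size still fits the surfaces that were already
 * allocated. A simplified, self-contained sketch of that decision -- the
 * FormatInfo struct and needFlushOnKeyFrame() are placeholders, not the real
 * VideoDecoderVP8 members: */
struct FormatInfo { int width, height, surfaceWidth, surfaceHeight; };

static bool needFlushOnKeyFrame(const FormatInfo& info, bool useGraphicBuffer,
                                bool sizeChanged)
{
    if (!sizeChanged)
        return false;   // nothing to signal
    // With graphic buffers, no flush is needed while the new size still fits
    // inside the previously allocated surfaces.
    bool noNeedFlush = useGraphicBuffer
            && info.width <= info.surfaceWidth
            && info.height <= info.surfaceHeight;
    return !noNeedFlush;
}

// Example: a 640x360 key frame arriving while 1280x720 surfaces exist does not
// force a flush: needFlushOnKeyFrame({640, 360, 1280, 720}, true, true) == false.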
Decode_Status VaapiDecoderVP8::decode(VideoDecodeBuffer * buffer)
{
    Decode_Status status;
    Vp8ParseResult result;
    bool isEOS = false;

    m_currentPTS = buffer->timeStamp;

    m_buffer = buffer->data;
    m_frameSize = buffer->size;

    DEBUG("VP8: Decode(bufsize =%d, timestamp=%ld)", m_frameSize,
          m_currentPTS);

    do {
        if (m_frameSize == 0) {
            status = DECODE_FAIL;
            break;
        }

        memset(&m_frameHdr, 0, sizeof(m_frameHdr));
        m_frameHdr.multi_frame_data = &m_currFrameContext;
        result =
            vp8_parse_frame_header(&m_frameHdr, m_buffer, 0, m_frameSize);
        status = getStatus(result);
        if (status != DECODE_SUCCESS) {
            break;
        }

        if (m_frameHdr.key_frame == VP8_KEY_FRAME) {
            status = ensureContext();
            if (status != DECODE_SUCCESS)
                return status;
        }

        status = decodePicture();
        if (status != DECODE_SUCCESS)
            break;

        if (m_frameHdr.show_frame) {
            m_currentPicture->m_timeStamp = m_currentPTS;
            m_currentPicture->output();
        } else {
            WARNING("warning: this picture isn't sent to render");
        }

        updateReferencePictures();

        if (m_frameHdr.refresh_entropy_probs) {
            memcpy(&m_lastFrameContext.token_prob_update,
                   &m_currFrameContext.token_prob_update,
                   sizeof(Vp8TokenProbUpdate));
            memcpy(&m_lastFrameContext.mv_prob_update,
                   &m_currFrameContext.mv_prob_update,
                   sizeof(Vp8MvProbUpdate));

            if (m_frameHdr.intra_16x16_prob_update_flag)
                memcpy(m_yModeProbs, m_frameHdr.intra_16x16_prob, 4);
            if (m_frameHdr.intra_chroma_prob_update_flag)
                memcpy(m_uvModeProbs, m_frameHdr.intra_chroma_prob, 3);
        } else {
            memcpy(&m_currFrameContext.token_prob_update,
                   &m_lastFrameContext.token_prob_update,
                   sizeof(Vp8TokenProbUpdate));
            memcpy(&m_currFrameContext.mv_prob_update,
                   &m_lastFrameContext.mv_prob_update,
                   sizeof(Vp8MvProbUpdate));
        }

    } while (0);

    if (status != DECODE_SUCCESS) {
        DEBUG("decode fail!!");
    }

    return status;
}
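/* The refresh_entropy_probs handling above follows the VP8 rule that a frame's
 * token/MV probability updates only persist when the header sets
 * refresh_entropy_probs; otherwise the next frame is decoded from the last
 * persisted context. A minimal, self-contained sketch with a placeholder
 * ProbContext struct (not the real Vp8TokenProbUpdate/Vp8MvProbUpdate layout): */
#include <cstring>

struct ProbContext { unsigned char tokenProbs[4]; unsigned char mvProbs[2]; };

static void carryEntropyContext(bool refreshEntropyProbs,
                                ProbContext* currentFrame,
                                ProbContext* persisted)
{
    if (refreshEntropyProbs) {
        // This frame's updated probabilities become the baseline for later frames.
        std::memcpy(persisted, currentFrame, sizeof(*persisted));
    } else {
        // Discard this frame's updates and restore the saved baseline.
        std::memcpy(currentFrame, persisted, sizeof(*currentFrame));
    }
}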
Decode_Status VaapiDecoderVP8::decode(VideoDecodeBuffer * buffer)
{
    Decode_Status status;
    Vp8ParserResult result;

    m_currentPTS = buffer->timeStamp;

    m_buffer = buffer->data;
    m_frameSize = buffer->size;

    DEBUG("VP8: Decode(bufsize =%d, timestamp=%ld)", m_frameSize,
          m_currentPTS);

    do {
        if (m_frameSize == 0) {
            status = DECODE_FAIL;
            break;
        }

        memset(&m_frameHdr, 0, sizeof(m_frameHdr));
        result =
            vp8_parser_parse_frame_header(&m_parser, &m_frameHdr, m_buffer, m_frameSize);
        status = getStatus(result);
        if (status != DECODE_SUCCESS) {
            break;
        }

        if (m_frameHdr.key_frame) {
            status = ensureContext();
            if (status != DECODE_SUCCESS)
                return status;
        }
#if __PSB_CACHE_DRAIN_FOR_FIRST_FRAME__
        int ii = 0;
        int decodeCount = 1;

        if (m_isFirstFrame) {
            decodeCount = 1280 * 720 / m_frameWidth / m_frameHeight * 2;
            m_isFirstFrame = false;
        }

        do {
            status = decodePicture();
        } while (status == DECODE_SUCCESS && ++ii < decodeCount);

#else
        status = decodePicture();
#endif

        if (status != DECODE_SUCCESS)
            break;

        if (m_frameHdr.show_frame) {
            m_currentPicture->m_timeStamp = m_currentPTS;
            //FIXME: add output
            outputPicture(m_currentPicture);
        } else {
            WARNING("warning: this picture isn't sent to render");
        }

        updateReferencePictures();

    } while (0);

    if (status != DECODE_SUCCESS) {
        DEBUG("decode fail!!");
    }

    return status;
}
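/* Worked example for the __PSB_CACHE_DRAIN_FOR_FIRST_FRAME__ path above: the
 * first frame is decoded decodeCount times, where decodeCount scales with how
 * much smaller the stream is than 1280x720 (integer division throughout). A
 * standalone sketch of the same arithmetic -- firstFrameDecodeCount() is an
 * illustrative helper, not part of the decoder: */
#include <cstdio>

static int firstFrameDecodeCount(int frameWidth, int frameHeight)
{
    // Same integer arithmetic as the decoder: (1280 * 720 / w / h) * 2.
    return 1280 * 720 / frameWidth / frameHeight * 2;
}

int main()
{
    std::printf("%d\n", firstFrameDecodeCount(1280, 720)); // prints 2
    std::printf("%d\n", firstFrameDecodeCount(640, 360));  // prints 8
    return 0;
}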