void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop) {
    OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
    def->format.video.nFrameWidth = mWidth;
    def->format.video.nFrameHeight = mHeight;
    def->format.video.nStride = def->format.video.nFrameWidth;
    def->format.video.nSliceHeight = def->format.video.nFrameHeight;

    def->nBufferSize = def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;

    def = &editPortInfo(kOutputPortIndex)->mDef;
    def->format.video.nFrameWidth = outputBufferWidth();
    def->format.video.nFrameHeight = outputBufferHeight();
    def->format.video.nStride = def->format.video.nFrameWidth;
    def->format.video.nSliceHeight = def->format.video.nFrameHeight;

    def->nBufferSize =
            (def->format.video.nFrameWidth *
             def->format.video.nFrameHeight * 3) / 2;

    if (updateCrop) {
        mCropLeft = 0;
        mCropTop = 0;
        mCropWidth = mWidth;
        mCropHeight = mHeight;
    }
}
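
The buffer size computed above is the standard planar YUV 4:2:0 footprint: one full-resolution luma plane plus two quarter-size chroma planes, i.e. width * height * 3/2 bytes. A minimal standalone sketch of that arithmetic (the resolution is just an illustrative value):

#include <cstdint>
#include <cstdio>

// Planar YUV 4:2:0: a Y plane of width*height bytes, plus U and V planes of
// (width/2)*(height/2) bytes each.
static uint32_t yuv420BufferSize(uint32_t width, uint32_t height) {
    return width * height * 3 / 2;
}

int main() {
    printf("1280x720 -> %u bytes\n", (unsigned)yuv420BufferSize(1280, 720));  // 1382400
    return 0;
}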
void SoftVideoDecoderOMXComponent::copyYV12FrameToOutputBuffer(
        uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
        size_t srcYStride, size_t srcUStride, size_t srcVStride) {
    size_t dstYStride = outputBufferWidth();
    size_t dstUVStride = dstYStride / 2;
    size_t dstHeight = outputBufferHeight();
    uint8_t *dstStart = dst;

    for (size_t i = 0; i < mHeight; ++i) {
         memcpy(dst, srcY, mWidth);
         srcY += srcYStride;
         dst += dstYStride;
    }

    dst = dstStart + dstYStride * dstHeight;
    for (size_t i = 0; i < mHeight / 2; ++i) {
         memcpy(dst, srcU, mWidth / 2);
         srcU += srcUStride;
         dst += dstUVStride;
    }

    dst = dstStart + (5 * dstYStride * dstHeight) / 4;
    for (size_t i = 0; i < mHeight / 2; ++i) {
         memcpy(dst, srcV, mWidth / 2);
         srcV += srcVStride;
         dst += dstUVStride;
    }
}
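
The copy above assumes a fixed destination layout: the U plane begins right after the full Y plane, at dstYStride * dstHeight, and the V plane a quarter-plane later, at 5/4 of that offset. A standalone sketch of those offsets with made-up dimensions:

#include <cstddef>
#include <cstdio>

int main() {
    size_t dstYStride = 640;   // stands in for outputBufferWidth()
    size_t dstHeight  = 480;   // stands in for outputBufferHeight()
    size_t uOffset = dstYStride * dstHeight;            // U plane follows the Y plane
    size_t vOffset = (5 * dstYStride * dstHeight) / 4;  // V plane follows the U plane
    printf("U plane at %zu, V plane at %zu\n", uOffset, vOffset);  // 307200, 384000
    return 0;
}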
void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
    OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
    outDef->format.video.nFrameWidth = outputBufferWidth();
    outDef->format.video.nFrameHeight = outputBufferHeight();
    outDef->format.video.nStride = outDef->format.video.nFrameWidth;
    outDef->format.video.nSliceHeight = outDef->format.video.nFrameHeight;

    outDef->nBufferSize =
        (outDef->format.video.nStride * outDef->format.video.nSliceHeight * 3) / 2;

    OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
    inDef->format.video.nFrameWidth = mWidth;
    inDef->format.video.nFrameHeight = mHeight;
    // input port is compressed, hence it has no stride
    inDef->format.video.nStride = 0;
    inDef->format.video.nSliceHeight = 0;

    // when output format changes, input buffer size does not actually change
    if (updateInputSize) {
        inDef->nBufferSize = max(
                                 outDef->nBufferSize / mMinCompressionRatio,
                                 max(mMinInputBufferSize, inDef->nBufferSize));
    }

    if (updateCrop) {
        mCropLeft = 0;
        mCropTop = 0;
        mCropWidth = mWidth;
        mCropHeight = mHeight;
    }
}
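
When updateInputSize is set, the compressed input buffer is sized from the output buffer via a minimum compression ratio, but never below mMinInputBufferSize and never smaller than its current size. A hedged sketch of that max-of-three computation, with assumed values for the ratio and the minimums:

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main() {
    uint32_t outBufferSize       = 1920 * 1080 * 3 / 2;  // decoded-frame size
    uint32_t minCompressionRatio = 2;                    // assumed mMinCompressionRatio
    uint32_t minInputBufferSize  = 64 * 1024;            // assumed mMinInputBufferSize
    uint32_t currentInputSize    = 256 * 1024;           // existing inDef->nBufferSize

    uint32_t newInputSize = std::max(outBufferSize / minCompressionRatio,
                                     std::max(minInputBufferSize, currentInputSize));
    printf("new input buffer size: %u\n", (unsigned)newInputSize);  // 1555200
    return 0;
}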
bool SoftMPEG4::handlePortSettingsChange() {
    uint32_t disp_width, disp_height;
    PVGetVideoDimensions(mHandle, (int32 *)&disp_width, (int32 *)&disp_height);

    uint32_t buf_width, buf_height;
    PVGetBufferDimensions(mHandle, (int32 *)&buf_width, (int32 *)&buf_height);

    CHECK_LE(disp_width, buf_width);
    CHECK_LE(disp_height, buf_height);

    ALOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d",
            disp_width, disp_height, buf_width, buf_height);

    CropSettingsMode cropSettingsMode = kCropUnSet;
    if (disp_width != buf_width || disp_height != buf_height) {
        cropSettingsMode = kCropSet;

        if (mCropWidth != disp_width || mCropHeight != disp_height) {
            mCropLeft = 0;
            mCropTop = 0;
            mCropWidth = disp_width;
            mCropHeight = disp_height;
            cropSettingsMode = kCropChanged;
        }
    }

    bool portWillReset = false;
    const bool fakeStride = true;
    SoftVideoDecoderOMXComponent::handlePortSettingsChange(
            &portWillReset, buf_width, buf_height, cropSettingsMode, fakeStride);
    if (portWillReset) {
        if (mMode == MODE_H263) {
            PVCleanUpVideoDecoder(mHandle);

            uint8_t *vol_data[1];
            int32_t vol_size = 0;

            vol_data[0] = NULL;
            if (!PVInitVideoDecoder(
                    mHandle, vol_data, &vol_size, 1, outputBufferWidth(), outputBufferHeight(),
                    H263_MODE)) {
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return true;
            }
        }

        mFramesConfigured = false;
    }

    return portWillReset;
}
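
The crop handling above distinguishes three states: no crop when display and buffer sizes match, crop set when they differ, and crop changed when the crop rectangle itself moved. A standalone sketch of that decision (the enum names mirror the ones used here; the dimensions in main are assumptions):

#include <cstdint>

enum CropSettingsMode { kCropUnSet = 0, kCropSet, kCropChanged };

static CropSettingsMode pickCropMode(uint32_t dispW, uint32_t dispH,
                                     uint32_t bufW, uint32_t bufH,
                                     uint32_t cropW, uint32_t cropH) {
    CropSettingsMode mode = kCropUnSet;
    if (dispW != bufW || dispH != bufH) {
        mode = kCropSet;                      // display area is a sub-rectangle of the buffer
        if (cropW != dispW || cropH != dispH) {
            mode = kCropChanged;              // and the crop rectangle actually changed
        }
    }
    return mode;
}

int main() {
    // 176x144 displayed inside a 176x160 buffer, with no crop recorded yet.
    return pickCropMode(176, 144, 176, 160, 0, 0) == kCropChanged ? 0 : 1;
}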
bool SoftAVC::setDecodeArgs(
        ivd_video_decode_ip_t *ps_dec_ip,
        ivd_video_decode_op_t *ps_dec_op,
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader,
        size_t timeStampIx) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV;

    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);

    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;

    /* When in flush and after EOS with zero byte input,
     * inHeader is set to zero. Hence check for non-null */
    if (inHeader) {
        ps_dec_ip->u4_ts = timeStampIx;
        ps_dec_ip->pv_stream_buffer =
            inHeader->pBuffer + inHeader->nOffset + mInputOffset;
        ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen - mInputOffset;
    } else {
        ps_dec_ip->u4_ts = 0;
        ps_dec_ip->pv_stream_buffer = NULL;
        ps_dec_ip->u4_num_Bytes = 0;
    }

    sizeUV = sizeY / 4;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;

    uint8_t *pBuf;
    if (outHeader) {
        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
            android_errorWriteLog(0x534e4554, "27833616");
            return false;
        }
        pBuf = outHeader->pBuffer;
    } else {
        // mFlushOutBuffer always has the right size.
        pBuf = mFlushOutBuffer;
    }

    ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
    ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
    ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
    return true;
}
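
The nAllocLen check above (logged against bug 27833616) rejects client buffers that cannot hold a full YUV 4:2:0 frame at the decoder's padded width and height before the three plane pointers are handed to the codec. A minimal sketch of that capacity test:

#include <cstddef>
#include <cstdio>

static bool outputBufferLargeEnough(size_t allocLen, size_t width, size_t height) {
    size_t sizeY  = width * height;
    size_t sizeUV = sizeY / 4;
    return allocLen >= sizeY + 2 * sizeUV;  // Y plane + U plane + V plane
}

int main() {
    printf("%d\n", outputBufferLargeEnough(1382400, 1280, 720));  // 1: exactly one frame
    printf("%d\n", outputBufferLargeEnough(1000000, 1280, 720));  // 0: too small
    return 0;
}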
Example #6
void SoftAVC::setDecodeArgs(
        ivd_video_decode_ip_t *ps_dec_ip,
        ivd_video_decode_op_t *ps_dec_op,
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader,
        size_t timeStampIx) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV;
    uint8_t *pBuf;

    ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
    ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);

    ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;

    /* When in flush and after EOS with zero byte input,
     * inHeader is set to zero. Hence check for non-null */
    if (inHeader) {
        ps_dec_ip->u4_ts = timeStampIx;
        ps_dec_ip->pv_stream_buffer =
            inHeader->pBuffer + inHeader->nOffset;
        ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
    } else {
        ps_dec_ip->u4_ts = 0;
        ps_dec_ip->pv_stream_buffer = NULL;
        ps_dec_ip->u4_num_Bytes = 0;
    }

    if (outHeader) {
        pBuf = outHeader->pBuffer;
    } else {
        pBuf = mFlushOutBuffer;
    }

    sizeUV = sizeY / 4;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
    ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;

    ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
    ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
    ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
    ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
    return;
}
Example #7
void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
    if (kOutputPortIndex == portIndex) {
        setFlushMode();

        /* Allocate a picture buffer to hold the flushed data */
        uint32_t displayStride = outputBufferWidth();
        uint32_t displayHeight = outputBufferHeight();

        uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
        mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
        if (NULL == mFlushOutBuffer) {
            ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
            return;
        }

        while (true) {
            ivd_video_decode_ip_t s_dec_ip;
            ivd_video_decode_op_t s_dec_op;
            IV_API_CALL_STATUS_T status;
            size_t sizeY, sizeUV;

            setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);

            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
                    (void *)&s_dec_op);
            if (0 == s_dec_op.u4_output_present) {
                resetPlugin();
                break;
            }
        }

        if (mFlushOutBuffer) {
            free(mFlushOutBuffer);
            mFlushOutBuffer = NULL;
        }

    }
}
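
The flush path above drains the decoder into a temporary 128-byte-aligned scratch frame rather than into client buffers. A hedged sketch of just that allocate/drain/free pattern, with placeholder dimensions and the drain loop reduced to a comment:

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <malloc.h>

int main() {
    uint32_t displayStride = 1920, displayHeight = 1088;   // placeholder dimensions
    uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
    uint8_t *flushOutBuffer = (uint8_t *)memalign(128, bufferSize);
    if (flushOutBuffer == NULL) {
        fprintf(stderr, "could not allocate flush buffer of size %u\n", (unsigned)bufferSize);
        return 1;
    }
    // ... repeatedly call the decoder with a NULL input and this scratch buffer
    //     until it reports no output present, as in the loop above ...
    free(flushOutBuffer);
    flushOutBuffer = NULL;
    return 0;
}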
Example #8
void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
    UNUSED(portIndex);

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (NULL == mCodecCtx) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }
    if (outputBufferWidth() != mStride) {
        /* Set the run-time (dynamic) parameters */
        mStride = outputBufferWidth();
        setParams(mStride);
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    while (!outQueue.empty()) {
        BufferInfo *inInfo;
        OMX_BUFFERHEADERTYPE *inHeader;

        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;
        size_t timeStampIx;

        inInfo = NULL;
        inHeader = NULL;

        if (!mIsInFlush) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
            mReceivedEOS = true;
            if (inHeader->nFilledLen == 0) {
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;
                setFlushMode();
            }
        }

        /* Get a free slot in timestamp array to hold input timestamp */
        {
            size_t i;
            timeStampIx = 0;
            for (i = 0; i < MAX_TIME_STAMPS; i++) {
                if (!mTimeStampsValid[i]) {
                    timeStampIx = i;
                    break;
                }
            }
            if (inHeader != NULL) {
                mTimeStampsValid[timeStampIx] = true;
                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
            }
        }

        {
            ivd_video_decode_ip_t s_dec_ip;
            ivd_video_decode_op_t s_dec_op;
            WORD32 timeDelay, timeTaken;
            size_t sizeY, sizeUV;

            if (!setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            GETTIME(&mTimeStart, NULL);
            /* Compute the time elapsed from the end of the previous decode()
             * to the start of the current decode() */
            TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);

            IV_API_CALL_STATUS_T status;
            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);

            bool unsupportedResolution =
                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));

            /* Check for unsupported dimensions */
            if (unsupportedResolution) {
                ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
                mSignalledError = true;
                return;
            }

            bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
            if (allocationFailed) {
                ALOGE("Allocation failure in decoder");
                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
                mSignalledError = true;
                return;
            }

            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));

            getVUIParams();

            GETTIME(&mTimeEnd, NULL);
            /* Compute time taken for decode() */
            TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);

            ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
                   s_dec_op.u4_num_bytes_consumed);
            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
                mFlushNeeded = true;
            }

            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
                /* If the input did not contain picture data, then ignore
                 * the associated timestamp */
                mTimeStampsValid[timeStampIx] = false;
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && !s_dec_op.u4_output_present) {
                mChangingResolution = false;
                resetDecoder();
                resetPlugin();
                mStride = outputBufferWidth();
                setParams(mStride);
                continue;
            }

            if (resChanged) {
                mChangingResolution = true;
                if (mFlushNeeded) {
                    setFlushMode();
                }
                continue;
            }

            // Combine the resolution change and the color-aspects change into one
            // PortSettingsChanged event if necessary.
            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
                uint32_t width = s_dec_op.u4_pic_wd;
                uint32_t height = s_dec_op.u4_pic_ht;
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, width, height);

                if (portWillReset) {
                    resetDecoder();
                    resetPlugin();
                    return;
                }
            } else if (mUpdateColorAspects) {
                notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
                    kDescribeColorAspectsIndex, NULL);
                mUpdateColorAspects = false;
                return;
            }

            if (s_dec_op.u4_output_present) {
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;

                outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
                mTimeStampsValid[s_dec_op.u4_ts] = false;

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on the input port and there is no output
                 * from the codec, then signal EOS on output port */
                if (mReceivedEOS) {
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();
                }
            }
        }

        /* If input EOS is seen and the decoder is not in flush mode,
         * put the decoder in flush mode.
         * EOS may arrive along with the last picture data; in that case the
         * decoder must be put in flush mode only after that input has been
         * decoded. That case is handled here. */

        if (mReceivedEOS && !mIsInFlush) {
            setFlushMode();
        }

        // TODO: Handle more than one picture data
        if (inHeader != NULL) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
        }
    }
}
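
The timestamp bookkeeping in the loop above parks each input timestamp in the first free slot of a fixed array and passes only the slot index (u4_ts) through the decoder, so the 64-bit OMX timestamp can be recovered when the matching frame comes out. A standalone sketch of that pattern (the array size is an assumption standing in for MAX_TIME_STAMPS):

#include <cstddef>
#include <cstdint>
#include <cstdio>

static const size_t kMaxTimeStamps = 32;          // stands in for MAX_TIME_STAMPS
static int64_t sTimeStamps[kMaxTimeStamps];
static bool    sTimeStampsValid[kMaxTimeStamps];

// Store an input timestamp and return the slot index handed to the decoder.
static size_t storeTimeStamp(int64_t ts) {
    size_t ix = 0;
    for (size_t i = 0; i < kMaxTimeStamps; i++) {
        if (!sTimeStampsValid[i]) { ix = i; break; }
    }
    sTimeStampsValid[ix] = true;
    sTimeStamps[ix] = ts;
    return ix;
}

// Retrieve and release the timestamp when the decoder returns that index.
static int64_t takeTimeStamp(size_t ix) {
    sTimeStampsValid[ix] = false;
    return sTimeStamps[ix];
}

int main() {
    size_t ix = storeTimeStamp(41667);
    printf("slot %zu -> %lld us\n", ix, (long long)takeTimeStamp(ix));
    return 0;
}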
Example #9
void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
    UNUSED(portIndex);

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    /* If input EOS is seen and the decoder is not in flush mode,
     * put the decoder in flush mode.
     * EOS may arrive along with the last picture data; in that case the
     * decoder must be put in flush mode only after that input has been
     * decoded. That case is handled here. */

    if (mReceivedEOS && !mIsInFlush) {
        setFlushMode();
    }

    while (!outQueue.empty()) {
        BufferInfo *inInfo;
        OMX_BUFFERHEADERTYPE *inHeader;

        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;
        size_t timeStampIx;

        inInfo = NULL;
        inHeader = NULL;

        if (!mIsInFlush) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
                if (inHeader == NULL) {
                    inQueue.erase(inQueue.begin());
                    inInfo->mOwnedByUs = false;
                    continue;
                }
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL) {
            if (inHeader->nFilledLen == 0) {
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
                    continue;
                }

                mReceivedEOS = true;
                inHeader = NULL;
                setFlushMode();
            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mReceivedEOS = true;
            }
        }

        // When there is an init required and the decoder is not in flush mode,
        // update output port's definition and reinitialize decoder.
        if (mInitNeeded && !mIsInFlush) {
            bool portWillReset = false;

            status_t err = reInitDecoder(mNewWidth, mNewHeight);
            if (err != OK) {
                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
                mSignalledError = true;
                return;
            }

            handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
            return;
        }

        /* Get a free slot in timestamp array to hold input timestamp */
        {
            size_t i;
            timeStampIx = 0;
            for (i = 0; i < MAX_TIME_STAMPS; i++) {
                if (!mTimeStampsValid[i]) {
                    timeStampIx = i;
                    break;
                }
            }
            if (inHeader != NULL) {
                mTimeStampsValid[timeStampIx] = true;
                mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
            }
        }

        {
            ivd_video_decode_ip_t s_dec_ip;
            ivd_video_decode_op_t s_dec_op;
            WORD32 timeDelay, timeTaken;
            size_t sizeY, sizeUV;

            setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
            // If input dump is enabled, then write to file
            DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes);

            GETTIME(&mTimeStart, NULL);
            /* Compute the time elapsed from the end of the previous decode()
             * to the start of the current decode() */
            TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);

            IV_API_CALL_STATUS_T status;
            status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);

            bool unsupportedDimensions =
                (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
            bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
            bool unsupportedLevel = (IH264D_UNSUPPORTED_LEVEL == (s_dec_op.u4_error_code & 0xFF));

            GETTIME(&mTimeEnd, NULL);
            /* Compute time taken for decode() */
            TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);

            PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
                   s_dec_op.u4_num_bytes_consumed);
            if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
                mFlushNeeded = true;
            }

            if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
                /* If the input did not contain picture data, then ignore
                 * the associated timestamp */
                mTimeStampsValid[timeStampIx] = false;
            }


            // This is needed to handle CTS DecoderTest testCodecResetsH264WithoutSurface,
            // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
            if (unsupportedDimensions && !mFlushNeeded) {
                bool portWillReset = false;
                mNewWidth = s_dec_op.u4_pic_wd;
                mNewHeight = s_dec_op.u4_pic_ht;

                status_t err = reInitDecoder(mNewWidth, mNewHeight);
                if (err != OK) {
                    notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
                    mSignalledError = true;
                    return;
                }

                handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);

                setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);

                ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
                return;
            }

            if (unsupportedLevel && !mFlushNeeded) {

                mNewLevel = 51;

                status_t err = reInitDecoder(mNewWidth, mNewHeight);
                if (err != OK) {
                    notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
                    mSignalledError = true;
                    return;
                }

                setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);

                ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
                return;
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && !s_dec_op.u4_output_present) {
                mChangingResolution = false;
                resetDecoder();
                resetPlugin();
                continue;
            }

            if (unsupportedDimensions || resChanged) {
                mChangingResolution = true;
                if (mFlushNeeded) {
                    setFlushMode();
                }

                if (unsupportedDimensions) {
                    mNewWidth = s_dec_op.u4_pic_wd;
                    mNewHeight = s_dec_op.u4_pic_ht;
                    mInitNeeded = true;
                }
                continue;
            }

            if (unsupportedLevel) {

                if (mFlushNeeded) {
                    setFlushMode();
                }

                mNewLevel = 51;
                mInitNeeded = true;
                continue;
            }

            if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
                uint32_t width = s_dec_op.u4_pic_wd;
                uint32_t height = s_dec_op.u4_pic_ht;
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, width, height);

                if (portWillReset) {
                    resetDecoder();
                    return;
                }
            }

            if (s_dec_op.u4_output_present) {
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;

                outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
                mTimeStampsValid[s_dec_op.u4_ts] = false;

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else {
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on the input port and there is no output
                 * from the codec, then signal EOS on output port */
                if (mReceivedEOS) {
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();
                }
            }
        }

        if (inHeader != NULL) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
        }
    }
}
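
Both onQueueFilled implementations classify decoder failures by masking u4_error_code with 0xFF before comparing against specific codes, since only the low byte identifies the error kind. A hedged sketch of that test pattern; the enum values and the high bits used here are stand-ins, not the real ivd/ih264d constants:

#include <cstdint>
#include <cstdio>

enum StandInErrorCode : uint32_t {
    kResChanged              = 0x16,  // stands in for IVD_RES_CHANGED
    kWidthHeightNotSupported = 0x17,  // stands in for IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED
};

int main() {
    uint32_t u4_error_code = 0x40000016;  // illustrative value: extra flags in the high bits
    bool resChanged  = (kResChanged == (u4_error_code & 0xFF));
    bool unsupported = (kWidthHeightNotSupported == (u4_error_code & 0xFF));
    printf("resChanged=%d unsupportedDimensions=%d\n", resChanged, unsupported);  // 1 0
    return 0;
}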
Example #10
status_t SoftMPEG2::initDecoder() {
    IV_API_CALL_STATUS_T status;

    UWORD32 u4_num_reorder_frames;
    UWORD32 u4_num_ref_frames;
    UWORD32 u4_share_disp_buf;

    mNumCores = GetCPUCoreCount();
    mWaitForI = true;

    /* Initialize number of ref and reorder modes (for MPEG2) */
    u4_num_reorder_frames = 16;
    u4_num_ref_frames = 16;
    u4_share_disp_buf = 0;

    uint32_t displayStride = outputBufferWidth();
    uint32_t displayHeight = outputBufferHeight();
    uint32_t displaySizeY = displayStride * displayHeight;

    {
        iv_num_mem_rec_ip_t s_num_mem_rec_ip;
        iv_num_mem_rec_op_t s_num_mem_rec_op;

        s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
        s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
        s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;

        status = ivdec_api_function(
                mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op);
        if (IV_SUCCESS != status) {
            ALOGE("Error in getting mem records: 0x%x",
                    s_num_mem_rec_op.u4_error_code);
            return UNKNOWN_ERROR;
        }

        mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
    }

    mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc(
            128, mNumMemRecords * sizeof(iv_mem_rec_t));
    if (mMemRecords == NULL) {
        ALOGE("Allocation failure");
        return NO_MEMORY;
    }

    memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));

    {
        size_t i;
        ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
        ivdext_fill_mem_rec_op_t s_fill_mem_op;
        iv_mem_rec_t *ps_mem_rec;

        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
            sizeof(ivdext_fill_mem_rec_ip_t);

        s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
        s_fill_mem_ip.e_output_format = mIvColorFormat;

        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
        s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
        s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
            sizeof(ivdext_fill_mem_rec_op_t);

        ps_mem_rec = mMemRecords;
        for (i = 0; i < mNumMemRecords; i++) {
            ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
        }

        status = ivdec_api_function(
                mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op);

        if (IV_SUCCESS != status) {
            ALOGE("Error in filling mem records: 0x%x",
                    s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
            return UNKNOWN_ERROR;
        }
        mNumMemRecords =
            s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;

        ps_mem_rec = mMemRecords;

        for (i = 0; i < mNumMemRecords; i++) {
            ps_mem_rec->pv_base = ivd_aligned_malloc(
                    ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
            if (ps_mem_rec->pv_base == NULL) {
                ALOGE("Allocation failure for memory record #%zu of size %u",
                        i, ps_mem_rec->u4_mem_size);
                status = IV_FAIL;
                return NO_MEMORY;
            }

            ps_mem_rec++;
        }
    }

    /* Initialize the decoder */
    {
        ivdext_init_ip_t s_init_ip;
        ivdext_init_op_t s_init_op;

        void *dec_fxns = (void *)ivdec_api_function;

        s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
        s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
        s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
        s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
        s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;

        s_init_ip.u4_share_disp_buf = u4_share_disp_buf;

        s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);

        s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
        s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;

        mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
        mCodecCtx->pv_fxns = dec_fxns;
        mCodecCtx->u4_size = sizeof(iv_obj_t);

        status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);
        if (status != IV_SUCCESS) {
            ALOGE("Error in init: 0x%x",
                    s_init_op.s_ivd_init_op_t.u4_error_code);
            return UNKNOWN_ERROR;
        }
    }

    /* Reset the plugin state */
    resetPlugin();

    /* Set the run time (dynamic) parameters */
    setParams(displayStride);

    /* Set number of cores/threads to be used by the codec */
    setNumCores();

    /* Get codec version */
    logVersion();

    /* Allocate internal picture buffer */
    uint32_t bufferSize = displaySizeY * 3 / 2;
    mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
    if (NULL == mFlushOutBuffer) {
        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
        return NO_MEMORY;
    }

    mInitNeeded = false;
    mFlushNeeded = false;
    return OK;
}
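
Most of initDecoder is the codec's memory-record protocol: ask how many buffers it needs, let it fill in each record's size and alignment, then allocate every record before IV_CMD_INIT. A hedged sketch of the allocation loop with a stand-in record type (the real one is iv_mem_rec_t):

#include <cstdio>
#include <cstdlib>
#include <malloc.h>

struct MemRec {        // stand-in for iv_mem_rec_t
    void *base;
    unsigned size;
    unsigned alignment;
};

// Allocate each record with its requested alignment; bail out on the first failure.
static bool allocateMemRecords(MemRec *recs, size_t count) {
    for (size_t i = 0; i < count; i++) {
        recs[i].base = memalign(recs[i].alignment, recs[i].size);
        if (recs[i].base == NULL) {
            fprintf(stderr, "allocation failure for record #%zu of size %u\n",
                    i, recs[i].size);
            return false;
        }
    }
    return true;
}

int main() {
    MemRec recs[2] = { {NULL, 1024, 128}, {NULL, 4096, 128} };  // illustrative sizes
    bool ok = allocateMemRecords(recs, 2);
    for (size_t i = 0; i < 2; i++) free(recs[i].base);
    return ok ? 0 : 1;
}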
Example #11
void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);
    bool EOSseen = false;

    while (!inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;

        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            EOSseen = true;
            if (inHeader->nFilledLen == 0) {
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                outHeader->nFilledLen = 0;
                outHeader->nFlags = OMX_BUFFERFLAG_EOS;

                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            }
        }

        if (mImg == NULL) {
            if (vpx_codec_decode(
                        (vpx_codec_ctx_t *)mCtx,
                        inHeader->pBuffer + inHeader->nOffset,
                        inHeader->nFilledLen,
                        NULL,
                        0)) {
                ALOGE("on2 decoder failed to decode frame.");

                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                return;
            }
            vpx_codec_iter_t iter = NULL;
            mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
        }

        if (mImg != NULL) {
            CHECK_EQ(mImg->fmt, VPX_IMG_FMT_I420);

            uint32_t width = mImg->d_w;
            uint32_t height = mImg->d_h;
            bool portWillReset = false;
            handlePortSettingsChange(&portWillReset, width, height);
            if (portWillReset) {
                return;
            }

            outHeader->nOffset = 0;
            outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
            outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0;
            outHeader->nTimeStamp = inHeader->nTimeStamp;
            if (outHeader->nAllocLen >= outHeader->nFilledLen) {
                uint8_t *dst = outHeader->pBuffer;
                const uint8_t *srcY = (const uint8_t *)mImg->planes[VPX_PLANE_Y];
                const uint8_t *srcU = (const uint8_t *)mImg->planes[VPX_PLANE_U];
                const uint8_t *srcV = (const uint8_t *)mImg->planes[VPX_PLANE_V];
                size_t srcYStride = mImg->stride[VPX_PLANE_Y];
                size_t srcUStride = mImg->stride[VPX_PLANE_U];
                size_t srcVStride = mImg->stride[VPX_PLANE_V];
                copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
            } else {
                ALOGE("b/27597103, buffer too small");
                outHeader->nFilledLen = 0;
            }

            mImg = NULL;
            outInfo->mOwnedByUs = false;
            outQueue.erase(outQueue.begin());
            outInfo = NULL;
            notifyFillBufferDone(outHeader);
            outHeader = NULL;
        }

        inInfo->mOwnedByUs = false;
        inQueue.erase(inQueue.begin());
        inInfo = NULL;
        notifyEmptyBufferDone(inHeader);
        inHeader = NULL;
    }
}
void SoftMPEG4::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!inQueue.empty() && outQueue.size() == kNumOutputBuffers) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        if (inHeader == NULL) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            continue;
        }

        PortInfo *port = editPortInfo(1);

        OMX_BUFFERHEADERTYPE *outHeader =
            port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader;

        if (inHeader->nFilledLen == 0) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            notifyEmptyBufferDone(inHeader);

            ++mInputBufferCount;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                outHeader->nFilledLen = 0;
                outHeader->nFlags = OMX_BUFFERFLAG_EOS;

                List<BufferInfo *>::iterator it = outQueue.begin();
                while ((*it)->mHeader != outHeader) {
                    ++it;
                }

                BufferInfo *outInfo = *it;
                outInfo->mOwnedByUs = false;
                outQueue.erase(it);
                outInfo = NULL;

                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            }
            return;
        }

        uint8_t *bitstream = inHeader->pBuffer + inHeader->nOffset;
        uint32_t *start_code = (uint32_t *)bitstream;
        bool volHeader = *start_code == 0xB0010000;
        if (volHeader) {
            PVCleanUpVideoDecoder(mHandle);
            mInitialized = false;
        }

        if (!mInitialized) {
            uint8_t *vol_data[1];
            int32_t vol_size = 0;

            vol_data[0] = NULL;

            if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) || volHeader) {
                vol_data[0] = bitstream;
                vol_size = inHeader->nFilledLen;
            }

            MP4DecodingMode mode =
                (mMode == MODE_MPEG4) ? MPEG4_MODE : H263_MODE;

            Bool success = PVInitVideoDecoder(
                    mHandle, vol_data, &vol_size, 1,
                    outputBufferWidth(), outputBufferHeight(), mode);

            if (!success) {
                ALOGW("PVInitVideoDecoder failed. Unsupported content?");

                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle);
            if (mode != actualMode) {
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            PVSetPostProcType((VideoDecControls *) mHandle, 0);

            bool hasFrameData = false;
            if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                inInfo->mOwnedByUs = false;
                inQueue.erase(inQueue.begin());
                inInfo = NULL;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;
            } else if (volHeader) {
                hasFrameData = true;
            }

            mInitialized = true;

            if (mode == MPEG4_MODE && handlePortSettingsChange()) {
                return;
            }

            if (!hasFrameData) {
                continue;
            }
        }

        if (!mFramesConfigured) {
            PortInfo *port = editPortInfo(1);
            OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(1).mHeader;

            OMX_U32 yFrameSize = sizeof(uint8) * mHandle->size;
            if ((outHeader->nAllocLen < yFrameSize) ||
                    (outHeader->nAllocLen - yFrameSize < yFrameSize / 2)) {
                ALOGE("Too small output buffer for reference frame: %lu bytes",
                        (unsigned long)outHeader->nAllocLen);
                android_errorWriteLog(0x534e4554, "30033990");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }
            PVSetReferenceYUV(mHandle, outHeader->pBuffer);
            mFramesConfigured = true;
        }

        uint32_t useExtTimestamp = (inHeader->nOffset == 0);

        // decoder deals in ms (int32_t), OMX in us (int64_t)
        // so use fake timestamp instead
        uint32_t timestamp = 0xFFFFFFFF;
        if (useExtTimestamp) {
            mPvToOmxTimeMap.add(mPvTime, inHeader->nTimeStamp);
            timestamp = mPvTime;
            mPvTime++;
        }

        int32_t bufferSize = inHeader->nFilledLen;
        int32_t tmp = bufferSize;

        OMX_U32 frameSize;
        OMX_U64 yFrameSize = (OMX_U64)mWidth * (OMX_U64)mHeight;
        if (yFrameSize > ((OMX_U64)UINT32_MAX / 3) * 2) {
            ALOGE("Frame size too large");
            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
            mSignalledError = true;
            return;
        }
        frameSize = (OMX_U32)(yFrameSize + (yFrameSize / 2));

        if (outHeader->nAllocLen < frameSize) {
            android_errorWriteLog(0x534e4554, "27833616");
            ALOGE("Insufficient output buffer size");
            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
            mSignalledError = true;
            return;
        }
        // The PV decoder is lying to us: sometimes it claims to have consumed
        // only a subset of the buffer when it clearly consumed all of it.
        // Ignore whatever it says...
        if (PVDecodeVideoFrame(
                    mHandle, &bitstream, &timestamp, &tmp,
                    &useExtTimestamp,
                    outHeader->pBuffer) != PV_TRUE) {
            ALOGE("failed to decode video frame.");

            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
            mSignalledError = true;
            return;
        }

        // H263 doesn't have a VOL header; the frame size information is in the short
        // header, i.e. the decoder may detect a size change after PVDecodeVideoFrame.
        if (handlePortSettingsChange()) {
            return;
        }

        // decoder deals in ms, OMX in us.
        outHeader->nTimeStamp = mPvToOmxTimeMap.valueFor(timestamp);
        mPvToOmxTimeMap.removeItem(timestamp);

        inHeader->nOffset += bufferSize;
        inHeader->nFilledLen = 0;
        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            outHeader->nFlags = OMX_BUFFERFLAG_EOS;
        } else {
            outHeader->nFlags = 0;
        }

        if (inHeader->nFilledLen == 0) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
        }

        ++mInputBufferCount;

        outHeader->nOffset = 0;
        outHeader->nFilledLen = frameSize;

        List<BufferInfo *>::iterator it = outQueue.begin();
        while ((*it)->mHeader != outHeader) {
            ++it;
        }

        BufferInfo *outInfo = *it;
        outInfo->mOwnedByUs = false;
        outQueue.erase(it);
        outInfo = NULL;

        notifyFillBufferDone(outHeader);
        outHeader = NULL;

        ++mNumSamplesOutput;
    }
}
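
The frame-size guard above does the width * height product in 64 bits and rejects anything whose 3/2 expansion would not fit in a 32-bit OMX_U32 before it is compared against nAllocLen. A minimal standalone sketch of that overflow check:

#include <cstdint>
#include <cstdio>

static bool frameSizeFitsIn32Bits(uint32_t width, uint32_t height) {
    uint64_t ySize = (uint64_t)width * (uint64_t)height;
    // ySize + ySize/2 must not exceed UINT32_MAX.
    return ySize <= ((uint64_t)UINT32_MAX / 3) * 2;
}

int main() {
    printf("%d\n", frameSizeFitsIn32Bits(1920, 1080));    // 1
    printf("%d\n", frameSizeFitsIn32Bits(65536, 65535));  // 0: would overflow
    return 0;
}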