OMX_ERRORTYPE OMXVideoDecoderAVC::PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p) {
    OMX_ERRORTYPE ret;
    ret = OMXVideoDecoderBase::PrepareDecodeBuffer(buffer, retain, p);
    CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareDecodeBuffer");

    // OMX_BUFFERFLAG_CODECCONFIG is an optional flag
    // if flag is set, buffer will only contain codec data.
    if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
        LOGV("Received AVC codec data.");
        return ret;
    }

    // OMX_BUFFERFLAG_ENDOFFRAME is an optional flag
    if (buffer->nFlags & (OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS)) {
        // TODO: if OMX_BUFFERFLAG_ENDOFFRAME indicates the end of a NAL unit rather than a frame,
        // and OMXVideoDecoderBase sets the HAS_COMPLETE_FRAME flag, corruption will happen.
        mTimeStamp = buffer->nTimeStamp;
        if (mFilledLen == 0) {
            // nothing has been accumulated, so this buffer contains a complete frame
            return ret;
        }
        // buffer contains the last part of a fragmented frame
        ret = AccumulateBuffer(buffer);
        CHECK_RETURN_VALUE("AccumulateBuffer");
        ret = FillDecodeBuffer(p);
        CHECK_RETURN_VALUE("FillDecodeBuffer");
        return ret;
    }

    LOGW("Received fragmented buffer.");
    // use time stamp to determine frame boundary
    if (mTimeStamp == INVALID_PTS) {
        // first ever buffer
        mTimeStamp = buffer->nTimeStamp;
    }

    if (mTimeStamp != buffer->nTimeStamp && mFilledLen != 0) {
        // buffer accumulated contains a complete frame
        ret = FillDecodeBuffer(p);
        CHECK_RETURN_VALUE("FillDecodeBuffer");
        // retain the current buffer
        *retain = BUFFER_RETAIN_GETAGAIN;
    } else {
        // buffer accumulation for beginning of fragmented buffer (mFilledLen == 0) or
        // middle/end of fragmented buffer (mFilledLen != 0)
        ret = AccumulateBuffer(buffer);
        CHECK_RETURN_VALUE("AccumulateBuffer");
        ret = OMX_ErrorNotReady;
    }

    if (buffer->nFilledLen != 0) {
        mTimeStamp = buffer->nTimeStamp;
    }
    return ret;
}
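// CHECK_RETURN_VALUE is used throughout these OMX components but its definition is not
// shown here. The sketch below is a plausible, hypothetical form of such a macro, not the
// project's verbatim definition: it logs the failing call and propagates the error,
// assuming an OMX_ERRORTYPE local named "ret" in the calling scope, as in the functions
// above. (The TcpListener example further down uses a different, two-argument
// CHECK_RETURN_VALUE from another codebase.)
#define CHECK_RETURN_VALUE(FUNC)                              \
    if (ret != OMX_ErrorNone) {                               \
        LOGE(FUNC " failed. Error code = 0x%x", ret);         \
        return ret;                                           \
    }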
OMX_ERRORTYPE OMXVideoDecoderAVCSecure::PrepareConfigBuffer(VideoConfigBuffer *p) {
    OMX_ERRORTYPE ret;
    ret = OMXVideoDecoderBase::PrepareConfigBuffer(p);
    CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareConfigBuffer");
    p->flag |= WANT_SURFACE_PROTECTION;
    return ret;
}
OMX_ERRORTYPE OMXVideoDecoderAVC::PrepareConfigBuffer(VideoConfigBuffer *p) {
    OMX_ERRORTYPE ret;

    ret = OMXVideoDecoderBase::PrepareConfigBuffer(p);
    CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareConfigBuffer");

    if (mParamAvc.eProfile == OMX_VIDEO_AVCProfileBaseline) {
        p->flag |= WANT_LOW_DELAY;
    }

    if (mDecodeSettings.nMaxWidth == 0 ||
        mDecodeSettings.nMaxHeight == 0) {
        return OMX_ErrorNone;
    }

    LOGW("AVC Video decoder used in Video Conferencing Mode.");

    // For video conferencing application
    p->width = mDecodeSettings.nMaxWidth;
    p->height = mDecodeSettings.nMaxHeight;
    p->profile = VAProfileH264ConstrainedBaseline;
    if (!(p->flag & USE_NATIVE_GRAPHIC_BUFFER)) {
        p->surfaceNumber = mDecodeSettings.nMaxNumberOfReferenceFrame + EXTRA_REFERENCE_FRAME;
        p->flag = WANT_ERROR_CONCEALMENT | WANT_LOW_DELAY | HAS_SURFACE_NUMBER | HAS_VA_PROFILE;
    } else {
        p->flag |= WANT_ERROR_CONCEALMENT | WANT_LOW_DELAY | HAS_SURFACE_NUMBER | HAS_VA_PROFILE;
    }

    return OMX_ErrorNone;
}
OMX_ERRORTYPE OMXVideoDecoderBase::ProcessorReset(void) {
    OMX_ERRORTYPE ret;
    VideoConfigBuffer configBuffer;
    // re-prepare the config buffer and pass it to the decoder to reset it
    ret = PrepareConfigBuffer(&configBuffer);
    CHECK_RETURN_VALUE("PrepareConfigBuffer");
    mVideoDecoder->reset(&configBuffer);
    return OMX_ErrorNone;
}
OMX_ERRORTYPE OMXVideoDecoderAVCSecure::PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p) {
    OMX_ERRORTYPE ret;

    ret = OMXVideoDecoderBase::PrepareDecodeNativeHandleBuffer(buffer, retain, p);
    CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareDecodeBuffer");

    if (buffer->nFilledLen == 0) {
        return OMX_ErrorNone;
    }
    native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;

    ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];

    // Check that we are dealing with the right buffer
    if (dataBuffer->magic != PROTECTED_DATA_BUFFER_MAGIC)
    {
        if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG)
        {
            // Processing codec data, which is not in ProtectedDataBuffer format
            ALOGI("%s: received AVC codec data (%" PRIu32 " bytes).", __FUNCTION__, buffer->nFilledLen);
            DumpBuffer2("OMX: AVC codec data: ", buffer->pBuffer, buffer->nFilledLen) ;
            return OMX_ErrorNone;
        }
        else
        {
            // Processing non-codec data, but this buffer is not in ProtectedDataBuffer format
            ALOGE("%s: protected data buffer pointer %p doesn't have the right magic", __FUNCTION__, dataBuffer);
            return OMX_ErrorBadParameter;
        }
    }
    // End of magic check

    if (dataBuffer->drmScheme == DRM_SCHEME_WVC) {
        // OMX_BUFFERFLAG_CODECCONFIG is an optional flag
        // if flag is set, buffer will only contain codec data.
        mDrmScheme = DRM_SCHEME_WVC;
        if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
            ALOGV("Received AVC codec data.");
            return ret;
        }
        return PrepareWVCDecodeBuffer(buffer, retain, p);
    } else if (dataBuffer->drmScheme == DRM_SCHEME_CENC) {
        mDrmScheme = DRM_SCHEME_CENC;
        return PrepareCENCDecodeBuffer(buffer, retain, p);
    } else if (dataBuffer->drmScheme == DRM_SCHEME_PRASF) {
        mDrmScheme = DRM_SCHEME_PRASF;
        return PreparePRASFDecodeBuffer(buffer, retain, p);
    }
    return ret;
}
bool TcpListener::listen(const char* path) {
    if (!path || strlen(path) == 0) {
        return false;
    }
    ASSIGN_POINTER(m_path, STR(path));
    CHECK_RETURN_VALUE(m_path, false);
    struct sockaddr_un sun;
    memset(&sun, 0, sizeof(sun));
    sun.sun_family = AF_UNIX;
    // reject paths that would overflow sun_path before copying
    if (strlen(path) >= sizeof(sun.sun_path)) {
        return false;
    }
    strcpy(sun.sun_path, m_path->c_str());
    return _listen(AF_UNIX, reinterpret_cast<struct sockaddr*>(&sun), SUN_LEN(&sun));
}
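// The _listen helper is not shown in this example; presumably it wraps the standard
// socket/bind/listen sequence. Below is a minimal, self-contained sketch of that sequence
// for an AF_UNIX socket. The function name listen_unix and the simplified error handling
// are assumptions for illustration, not part of the original class.
#include <cstring>
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

static int listen_unix(const char* path, int backlog) {
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0) {
        return -1;
    }
    struct sockaddr_un sun;
    memset(&sun, 0, sizeof(sun));
    sun.sun_family = AF_UNIX;
    // truncate rather than overflow sun_path
    strncpy(sun.sun_path, path, sizeof(sun.sun_path) - 1);
    if (bind(fd, reinterpret_cast<struct sockaddr*>(&sun), SUN_LEN(&sun)) < 0 ||
        listen(fd, backlog) < 0) {
        close(fd);
        return -1;
    }
    return fd;
}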
OMX_ERRORTYPE OMXVideoDecoderBase::ProcessorInit(void) {
    OMX_ERRORTYPE ret;
    ret = OMXComponentCodecBase::ProcessorInit();
    CHECK_RETURN_VALUE("OMXComponentCodecBase::ProcessorInit");

    if (mVideoDecoder == NULL) {
        LOGE("ProcessorInit: Video decoder is not created.");
        return OMX_ErrorDynamicResourcesUnavailable;
    }

    VideoConfigBuffer configBuffer;
    ret = PrepareConfigBuffer(&configBuffer);
    CHECK_RETURN_VALUE("PrepareConfigBuffer");

    //pthread_mutex_lock(&mSerializationLock);
    Decode_Status status = mVideoDecoder->start(&configBuffer);
    //pthread_mutex_unlock(&mSerializationLock);

    if (status != DECODE_SUCCESS) {
        return TranslateDecodeStatus(status);
    }

    return OMX_ErrorNone;
}
OMX_ERRORTYPE OMXVideoDecoderAVCSecure::PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p) {
    OMX_ERRORTYPE ret;
    ret = OMXVideoDecoderBase::PrepareDecodeBuffer(buffer, retain, p);
    CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareDecodeBuffer");

    if (buffer->nFilledLen == 0) {
        return OMX_ErrorNone;
    }
    // OMX_BUFFERFLAG_CODECCONFIG is an optional flag
    // if flag is set, buffer will only contain codec data.
    if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
        LOGV("Received AVC codec data.");
        return ret;
    }
    p->flag |= HAS_COMPLETE_FRAME;

    if (buffer->nOffset != 0) {
        LOGW("buffer offset %d is not zero!!!", buffer->nOffset);
    }

    IMRDataBuffer *imrBuffer = (IMRDataBuffer *)buffer->pBuffer;
    if (imrBuffer->clear) {
        p->data = imrBuffer->data + buffer->nOffset;
        p->size = buffer->nFilledLen;
    } else {
        imrBuffer->size = NALU_BUFFER_SIZE;
        sec_result_t res = Drm_WV_ReturnNALUHeaders(WV_SESSION_ID, imrBuffer->offset, buffer->nFilledLen, imrBuffer->data, (uint32_t *)&(imrBuffer->size));
        if (res == DRM_FAIL_FW_SESSION) {
            LOGW("Drm_WV_ReturnNALUHeaders failed. Session is disabled.");
            mSessionPaused = true;
            ret = OMX_ErrorNotReady;
        } else if (res != 0) {
            mSessionPaused = false;
            LOGE("Drm_WV_ReturnNALUHeaders failed. Error = %#x, IMR offset = %d, len = %d", res, imrBuffer->offset, buffer->nFilledLen);
            ret = OMX_ErrorHardware;
        } else {
            mSessionPaused = false;
            p->data = imrBuffer->data;
            p->size = imrBuffer->size;
            p->flag |= IS_SECURE_DATA;
        }
    }

    // reset IMR buffer size
    imrBuffer->size = NALU_BUFFER_SIZE;
    return ret;
}
OMX_ERRORTYPE OMXVideoDecoderBase::ProcessorProcess(
    OMX_BUFFERHEADERTYPE ***pBuffers,
    buffer_retain_t *retains,
    OMX_U32) {

    OMX_ERRORTYPE ret;
    Decode_Status status;
    OMX_BOOL isResolutionChange = OMX_FALSE;
    // fill render buffer without draining decoder output queue
    ret = FillRenderBuffer(pBuffers[OUTPORT_INDEX], &retains[OUTPORT_INDEX], 0, &isResolutionChange);
    if (ret == OMX_ErrorNone) {
        retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        if (isResolutionChange) {
            HandleFormatChange();
        }
        // TODO: continue decoding
        return ret;
    } else if (ret != OMX_ErrorNotReady) {
        return ret;
    }

    VideoDecodeBuffer decodeBuffer;
    // PrepareDecodeBuffer will set retain to either BUFFER_RETAIN_GETAGAIN or BUFFER_RETAIN_NOT_RETAIN
    ret = PrepareDecodeBuffer(*pBuffers[INPORT_INDEX], &retains[INPORT_INDEX], &decodeBuffer);
    if (ret == OMX_ErrorNotReady) {
        retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        return OMX_ErrorNone;
    } else if (ret != OMX_ErrorNone) {
        return ret;
    }

    if (decodeBuffer.size != 0) {
        //pthread_mutex_lock(&mSerializationLock);
        status = mVideoDecoder->decode(&decodeBuffer);
        //pthread_mutex_unlock(&mSerializationLock);

        if (status == DECODE_FORMAT_CHANGE) {
            ret = HandleFormatChange();
            CHECK_RETURN_VALUE("HandleFormatChange");
            ((*pBuffers[OUTPORT_INDEX]))->nFilledLen = 0;
            retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
            retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
            // real dynamic resolution change will be handled later
            // Here is just a temporary workaround
            // don't use the output buffer if format is changed.
            return OMX_ErrorNone;
        } else if (status == DECODE_NO_CONFIG) {
            LOGW("Decoder returns DECODE_NO_CONFIG.");
            retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
            return OMX_ErrorNone;
        } else if (status == DECODE_NO_REFERENCE) {
            LOGW("Decoder returns DECODE_NO_REFERENCE.");
            //retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
            //return OMX_ErrorNone;
        } else if (status == DECODE_MULTIPLE_FRAME) {
            if (decodeBuffer.ext != NULL && decodeBuffer.ext->extType == PACKED_FRAME_TYPE && decodeBuffer.ext->extData != NULL) {
                PackedFrameData* nextFrame = (PackedFrameData*)decodeBuffer.ext->extData;
                (*pBuffers[INPORT_INDEX])->nOffset += nextFrame->offSet;
                (*pBuffers[INPORT_INDEX])->nTimeStamp = nextFrame->timestamp;
                (*pBuffers[INPORT_INDEX])->nFilledLen -= nextFrame->offSet;
                retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
                LOGW("Find multiple frames in a buffer, next frame offset = %d, timestamp = %lld", (*pBuffers[INPORT_INDEX])->nOffset, (*pBuffers[INPORT_INDEX])->nTimeStamp);
            }
        } else if (status != DECODE_SUCCESS && status != DECODE_FRAME_DROPPED) {
            if (checkFatalDecoderError(status)) {
                return TranslateDecodeStatus(status);
            } else {
                // For decoder errors that can be tolerated, do not report an error; continue decoding.
                TranslateDecodeStatus(status);

                ((*pBuffers[OUTPORT_INDEX]))->nFilledLen = 0;

                // Do not return, and try to drain the output queue
                // retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
                // return OMX_ErrorNone;
            }
        }
    }
    // drain the decoder output queue when in EOS state and fill the render buffer
    ret = FillRenderBuffer(pBuffers[OUTPORT_INDEX], &retains[OUTPORT_INDEX],
            (*pBuffers[INPORT_INDEX])->nFlags, &isResolutionChange);

    if (isResolutionChange) {
        HandleFormatChange();
    }

    bool inputEoS = ((*pBuffers[INPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS);
    bool outputEoS = ((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS);
    // if output port is not eos, retain the input buffer until all the output buffers are drained.
    if (inputEoS && !outputEoS) {
        retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        // the input buffer is retained for draining purpose. Set nFilledLen to 0 so buffer will not be decoded again.
        (*pBuffers[INPORT_INDEX])->nFilledLen = 0;
    }

    if (ret == OMX_ErrorNotReady) {
        retains[OUTPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
        ret = OMX_ErrorNone;
    }

    return ret;
}
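// ProcessorProcess above aborts only when checkFatalDecoderError() classifies the decode
// status as unrecoverable; everything else falls through so the output queue can still be
// drained. The classifier below is a hypothetical sketch of that idea, not the project's
// verbatim helper: the exact set of Decode_Status values treated as fatal is an assumption.
static bool checkFatalDecoderError(Decode_Status status) {
    // statuses assumed unrecoverable: give up and report the translated OMX error
    if (status == DECODE_MEMORY_FAIL || status == DECODE_FAIL) {
        return true;
    }
    // other failures (e.g. a parser or driver hiccup on a single frame) are tolerated;
    // the caller clears nFilledLen and keeps decoding subsequent buffers
    return false;
}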