void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
        bool *portWillReset, uint32_t width, uint32_t height,
        CropSettingsMode cropSettingsMode, bool fakeStride) {
    *portWillReset = false;
    bool sizeChanged = (width != mWidth || height != mHeight);
    bool updateCrop = (cropSettingsMode == kCropUnSet);
    bool cropChanged = (cropSettingsMode == kCropChanged);
    bool strideChanged = false;
    if (fakeStride) {
        OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
        if (def->format.video.nStride != (OMX_S32)width
                || def->format.video.nSliceHeight != (OMX_U32)height) {
            strideChanged = true;
        }
    }

    if (sizeChanged || cropChanged || strideChanged) {
        mWidth = width;
        mHeight = height;

        if ((sizeChanged && !mIsAdaptive)
                || width > mAdaptiveMaxWidth
                || height > mAdaptiveMaxHeight) {
            if (mIsAdaptive) {
                if (width > mAdaptiveMaxWidth) {
                    mAdaptiveMaxWidth = width;
                }
                if (height > mAdaptiveMaxHeight) {
                    mAdaptiveMaxHeight = height;
                }
            }
            updatePortDefinitions(updateCrop);
            notify(OMX_EventPortSettingsChanged, kOutputPortIndex, 0, NULL);
            mOutputPortSettingsChange = AWAITING_DISABLED;
            *portWillReset = true;
        } else {
            updatePortDefinitions(updateCrop);

            if (fakeStride) {
                // MAJOR HACK that is not pretty, it's just to fool the renderer to read the
                // correct data.
                // Some software decoders (e.g. SoftMPEG4) fill decoded frame directly to
                // output buffer without considering the output buffer stride and slice
                // height. So this is used to signal how the buffer is arranged. The
                // alternative is to re-arrange the output buffer in SoftMPEG4, but that
                // results in memcopies.
                OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
                def->format.video.nStride = mWidth;
                def->format.video.nSliceHeight = mHeight;
            }

            notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
                   OMX_IndexConfigCommonOutputCrop, NULL);
        }
    }
}
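// Usage sketch (not part of the sources above): a hypothetical subclass of
// SoftVideoDecoderOMXComponent could drive handlePortSettingsChange() like this once a
// frame has been decoded and the new dimensions are known. Only the function signature and
// the kCropUnSet/kCropChanged values come from the code above; MySoftDecoder and
// checkOutputPort are illustrative assumptions.
void MySoftDecoder::checkOutputPort(uint32_t newWidth, uint32_t newHeight, bool cropChanged) {
    bool portWillReset = false;
    handlePortSettingsChange(
            &portWillReset, newWidth, newHeight,
            cropChanged ? kCropChanged : kCropUnSet, false /* fakeStride */);
    if (portWillReset) {
        // The component has signalled OMX_EventPortSettingsChanged and is waiting for the
        // client to disable/re-enable the output port; stop filling output buffers for now.
        return;
    }
    // Otherwise at most the crop rectangle changed and decoding can continue.
}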
void SoftVideoDecoderOMXComponent::initPorts(
        OMX_U32 numInputBuffers,
        OMX_U32 inputBufferSize,
        OMX_U32 numOutputBuffers,
        const char *mimeType,
        OMX_U32 minCompressionRatio) {
    mMinInputBufferSize = inputBufferSize;
    mMinCompressionRatio = minCompressionRatio;

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    def.nPortIndex = kInputPortIndex;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = numInputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = inputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>(mimeType);
    def.format.video.pNativeRender = NULL;
    /* size is initialized in updatePortDefinitions() */
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = mCodingType;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.pNativeWindow = NULL;

    addPort(def);

    def.nPortIndex = kOutputPortIndex;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = numOutputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.pNativeRender = NULL;
    /* size is initialized in updatePortDefinitions() */
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.pNativeWindow = NULL;

    addPort(def);

    updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
}
bool SoftMPEG4::portSettingsChanged() {
    uint32_t disp_width, disp_height;
    PVGetVideoDimensions(mHandle, (int32 *)&disp_width, (int32 *)&disp_height);

    uint32_t buf_width, buf_height;
    PVGetBufferDimensions(mHandle, (int32 *)&buf_width, (int32 *)&buf_height);

    CHECK_LE(disp_width, buf_width);
    CHECK_LE(disp_height, buf_height);

    ALOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d",
            disp_width, disp_height, buf_width, buf_height);

    if (mCropWidth != disp_width || mCropHeight != disp_height) {
        mCropLeft = 0;
        mCropTop = 0;
        mCropWidth = disp_width;
        mCropHeight = disp_height;

        notify(OMX_EventPortSettingsChanged,
               1,
               OMX_IndexConfigCommonOutputCrop,
               NULL);
    }

    if (buf_width != mWidth || buf_height != mHeight) {
        mWidth = buf_width;
        mHeight = buf_height;

        updatePortDefinitions();

        if (mMode == MODE_H263) {
            PVCleanUpVideoDecoder(mHandle);

            uint8_t *vol_data[1];
            int32_t vol_size = 0;

            vol_data[0] = NULL;
            if (!PVInitVideoDecoder(
                    mHandle, vol_data, &vol_size, 1, mWidth, mHeight,
                    H263_MODE)) {
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return true;
            }
        }

        mFramesConfigured = false;

        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
        mOutputPortSettingsChange = AWAITING_DISABLED;
        return true;
    }

    return false;
}
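// Usage sketch (assumption, not taken from the sources above): the decode loop in
// SoftMPEG4::onQueueFilled() is expected to consult portSettingsChanged()'s return value
// and abandon the current pass while the output port is being reconfigured, roughly:
//
//     if (portSettingsChanged()) {
//         return;  // output buffers are reallocated at the new size before decoding resumes
//     }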
bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
    if (mWidth != info->picWidth || mHeight != info->picHeight) {
        mWidth  = info->picWidth;
        mHeight = info->picHeight;
        mPictureSize = mWidth * mHeight * 3 / 2;
        mCropWidth = mWidth;
        mCropHeight = mHeight;

        updatePortDefinitions();
        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
        mOutputPortSettingsChange = AWAITING_DISABLED;
        return true;
    }

    return false;
}
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        mComponentRole,
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kMaxPortIndex) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            return OMX_ErrorNone;
        }

        case kPrepareForAdaptivePlaybackIndex:
        {
            const PrepareForAdaptivePlaybackParams* adaptivePlaybackParams =
                    (const PrepareForAdaptivePlaybackParams *)params;
            mIsAdaptive = adaptivePlaybackParams->bEnable;
            if (mIsAdaptive) {
                mAdaptiveMaxWidth = adaptivePlaybackParams->nMaxFrameWidth;
                mAdaptiveMaxHeight = adaptivePlaybackParams->nMaxFrameHeight;
                mWidth = mAdaptiveMaxWidth;
                mHeight = mAdaptiveMaxHeight;
            } else {
                mAdaptiveMaxWidth = 0;
                mAdaptiveMaxHeight = 0;
            }
            updatePortDefinitions();
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *newParams =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
            OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &newParams->format.video;
            OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(newParams->nPortIndex)->mDef;

            uint32_t oldWidth = def->format.video.nFrameWidth;
            uint32_t oldHeight = def->format.video.nFrameHeight;
            uint32_t newWidth = video_def->nFrameWidth;
            uint32_t newHeight = video_def->nFrameHeight;
            if (newWidth != oldWidth || newHeight != oldHeight) {
                bool outputPort = (newParams->nPortIndex == kOutputPortIndex);
                def->format.video.nFrameWidth =
                    (mIsAdaptive && outputPort) ? mAdaptiveMaxWidth : newWidth;
                def->format.video.nFrameHeight =
                    (mIsAdaptive && outputPort) ? mAdaptiveMaxHeight : newHeight;
                def->format.video.nStride = def->format.video.nFrameWidth;
                def->format.video.nSliceHeight = def->format.video.nFrameHeight;
                def->nBufferSize =
                    def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
                if (outputPort) {
                    mWidth = newWidth;
                    mHeight = newHeight;
                    mCropLeft = 0;
                    mCropTop = 0;
                    mCropWidth = newWidth;
                    mCropHeight = newHeight;
                }
                newParams->nBufferSize = def->nBufferSize;
            }
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}
void SoftFFmpegVideo::onQueueFilled(OMX_U32 portIndex) {
    int err = 0;

    if (mSignalledError || mOutputPortSettingsChange != NONE) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;

        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        if (mCtx->width != mWidth || mCtx->height != mHeight) {
            mCtx->width = mWidth;
            mCtx->height = mHeight;
            mStride = mWidth;

            updatePortDefinitions();

            notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
            mOutputPortSettingsChange = AWAITING_DISABLED;
            return;
        }

        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            notifyEmptyBufferDone(inHeader);

            outHeader->nFilledLen = 0;
            outHeader->nFlags = OMX_BUFFERFLAG_EOS;

            outQueue.erase(outQueue.begin());
            outInfo->mOwnedByUs = false;
            notifyFillBufferDone(outHeader);
            return;
        }

        if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
            LOGI("got extradata, ignore: %d, size: %lu",
                    mIgnoreExtradata, inHeader->nFilledLen);
            hexdump(inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen);
            if (!mExtradataReady && !mIgnoreExtradata) {
                //if (mMode == MODE_H264)
                // It is possible to receive multiple input buffers with the
                // OMX_BUFFERFLAG_CODECCONFIG flag. For H264, for example, the first input
                // buffer is the SPS and another is the PPS!
                int orig_extradata_size = mCtx->extradata_size;
                mCtx->extradata_size += inHeader->nFilledLen;
                mCtx->extradata = (uint8_t *)realloc(mCtx->extradata,
                        mCtx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
                if (!mCtx->extradata) {
                    LOGE("ffmpeg video decoder failed to alloc extradata memory.");
                    notify(OMX_EventError, OMX_ErrorInsufficientResources, 0, NULL);
                    mSignalledError = true;
                    return;
                }

                memcpy(mCtx->extradata + orig_extradata_size,
                        inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen);
                memset(mCtx->extradata + mCtx->extradata_size, 0,
                        FF_INPUT_BUFFER_PADDING_SIZE);

                inInfo->mOwnedByUs = false;
                inQueue.erase(inQueue.begin());
                inInfo = NULL;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;

                continue;
            }
            if (mIgnoreExtradata) {
                LOGI("got extradata, size: %lu, but ignore it", inHeader->nFilledLen);
                inInfo->mOwnedByUs = false;
                inQueue.erase(inQueue.begin());
                inInfo = NULL;
                notifyEmptyBufferDone(inHeader);
                inHeader = NULL;

                continue;
            }
        }

        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
        pkt.size = inHeader->nFilledLen;
        pkt.pts = inHeader->nTimeStamp;
#if DEBUG_PKT
        LOGV("pkt size: %d, pts: %lld", pkt.size, pkt.pts);
#endif

        if (!mExtradataReady) {
            LOGI("extradata is ready");
            hexdump(mCtx->extradata, mCtx->extradata_size);
            LOGI("open ffmpeg decoder now");
            mExtradataReady = true;
            err = avcodec_open2(mCtx, mCtx->codec, NULL);
            if (err < 0) {
                LOGE("ffmpeg video decoder failed to initialize. (%d)", err);
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }
        }

        int gotPic = false;
        AVFrame *frame = avcodec_alloc_frame();
        err = avcodec_decode_video2(mCtx, frame, &gotPic, &pkt);
        if (err < 0) {
            LOGE("ffmpeg video decoder failed to decode frame. (%d)", err);
            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
            mSignalledError = true;
            av_free(frame);
            return;
        }

        if (gotPic) {
            AVPicture pict;
            int64_t pts = AV_NOPTS_VALUE;
            uint8_t *dst = outHeader->pBuffer;

            memset(&pict, 0, sizeof(AVPicture));
            pict.data[0] = dst;
            pict.data[1] = dst + mStride * mHeight;
            pict.data[2] = pict.data[1] + (mStride / 2 * mHeight / 2);
            pict.linesize[0] = mStride;
            pict.linesize[1] = mStride / 2;
            pict.linesize[2] = mStride / 2;

            int sws_flags = SWS_BICUBIC;
            mImgConvertCtx = sws_getCachedContext(mImgConvertCtx,
                    mWidth, mHeight, mCtx->pix_fmt,
                    mWidth, mHeight, PIX_FMT_YUV420P,
                    sws_flags, NULL, NULL, NULL);
            if (mImgConvertCtx == NULL) {
                LOGE("Cannot initialize the conversion context");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                av_free(frame);
                return;
            }
            sws_scale(mImgConvertCtx, frame->data, frame->linesize,
                    0, mHeight, pict.data, pict.linesize);

            outHeader->nOffset = 0;
            outHeader->nFilledLen = (mStride * mHeight * 3) / 2;
            outHeader->nFlags = 0;
            if (frame->key_frame)
                outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;

            // process timestamps
            if (decoder_reorder_pts == -1) {
                pts = *(int64_t*)av_opt_ptr(avcodec_get_frame_class(),
                        frame, "best_effort_timestamp");
            } else if (decoder_reorder_pts) {
                pts = frame->pkt_pts;
            } else {
                pts = frame->pkt_dts;
            }

            if (pts == AV_NOPTS_VALUE) {
                pts = 0;
            }
            outHeader->nTimeStamp = pts;
#if DEBUG_FRM
            LOGV("frame pts: %lld", pts);
#endif

            outInfo->mOwnedByUs = false;
            outQueue.erase(outQueue.begin());
            outInfo = NULL;
            notifyFillBufferDone(outHeader);
            outHeader = NULL;
        }

        inInfo->mOwnedByUs = false;
        inQueue.erase(inQueue.begin());
        inInfo = NULL;
        notifyEmptyBufferDone(inHeader);
        inHeader = NULL;
        av_free(frame);
    }
}
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        mComponentRole,
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kMaxPortIndex) {
                return OMX_ErrorBadPortIndex;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == kInputPortIndex) {
                if (formatParams->eCompressionFormat != mCodingType
                        || formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
                    return OMX_ErrorUnsupportedSetting;
                }
            } else {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused
                        || formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) {
                    return OMX_ErrorUnsupportedSetting;
                }
            }

            return OMX_ErrorNone;
        }

        case kPrepareForAdaptivePlaybackIndex:
        {
            const PrepareForAdaptivePlaybackParams* adaptivePlaybackParams =
                    (const PrepareForAdaptivePlaybackParams *)params;
            mIsAdaptive = adaptivePlaybackParams->bEnable;
            if (mIsAdaptive) {
                mAdaptiveMaxWidth = adaptivePlaybackParams->nMaxFrameWidth;
                mAdaptiveMaxHeight = adaptivePlaybackParams->nMaxFrameHeight;
                mWidth = mAdaptiveMaxWidth;
                mHeight = mAdaptiveMaxHeight;
            } else {
                mAdaptiveMaxWidth = 0;
                mAdaptiveMaxHeight = 0;
            }
            updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *newParams =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
            OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &newParams->format.video;
            OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(newParams->nPortIndex)->mDef;

            uint32_t oldWidth = def->format.video.nFrameWidth;
            uint32_t oldHeight = def->format.video.nFrameHeight;
            uint32_t newWidth = video_def->nFrameWidth;
            uint32_t newHeight = video_def->nFrameHeight;
            if (newWidth != oldWidth || newHeight != oldHeight) {
                bool outputPort = (newParams->nPortIndex == kOutputPortIndex);
                if (outputPort) {
                    // only update (essentially crop) if size changes
                    mWidth = newWidth;
                    mHeight = newHeight;

                    updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
                    // reset buffer size based on frame size
                    newParams->nBufferSize = def->nBufferSize;
                } else {
                    // For input port, we only set nFrameWidth and nFrameHeight. Buffer size
                    // is updated when configuring the output port using the max-frame-size,
                    // though client can still request a larger size.
                    def->format.video.nFrameWidth = newWidth;
                    def->format.video.nFrameHeight = newHeight;
                }
            }
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}