// Wraps a decoder-output MediaBuffer that carries a GraphicBuffer into a
// VideoData sample, either by copying its contents into a freshly allocated
// gralloc buffer (mNeedsCopyBuffer, bug 1199809 workaround) or by handing the
// MediaBuffer itself to a recycled TextureClient (zero-copy path).
//
// @param aSource   decoder output; must have a non-null graphicBuffer().
// @param aPicture  in/out crop rectangle; on the copy path it is updated to
//                  the 2-aligned size of the allocated destination buffer.
// @return the new sample, or nullptr on allocation/lookup failure.
//         Timestamps/offset/keyframe fields are filled in later by the caller.
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromGraphicBuffer(MediaBuffer* aSource,
                                                          gfx::IntRect& aPicture)
{
  sp<GraphicBuffer> srcBuffer(aSource->graphicBuffer());
  RefPtr<TextureClient> textureClient;

  if (mNeedsCopyBuffer) {
    // Copy buffer contents for bug 1199809.
    if (!mCopyAllocator) {
      mCopyAllocator =
        new TextureClientRecycleAllocator(ImageBridgeChild::GetSingleton());
    }
    if (!mCopyAllocator) {
      GVDM_LOG("Create buffer allocator failed!");
      return nullptr;
    }
    // Gralloc YUV buffers want even dimensions.
    gfx::IntSize size(Align(aPicture.width, 2), Align(aPicture.height, 2));
    textureClient =
      mCopyAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV, size,
                                      BackendSelector::Content,
                                      TextureFlags::DEFAULT,
                                      ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
    if (!textureClient) {
      GVDM_LOG("Copy buffer allocation failed!");
      return nullptr;
    }
    // Update size to match buffer's.
    aPicture.width = size.width;
    aPicture.height = size.height;
    sp<GraphicBuffer> destBuffer =
      static_cast<GrallocTextureClientOGL*>(textureClient.get())->GetGraphicBuffer();
    CopyGraphicBuffer(srcBuffer, destBuffer);
  } else {
    textureClient = mNativeWindow->getTextureClientFromBuffer(srcBuffer.get());
    // FIX: the lookup can fail (the copy path above already guards against its
    // allocation failing the same way); previously a null TextureClient was
    // dereferenced by SetRecycleCallback below.
    if (!textureClient) {
      GVDM_LOG("Get TextureClient from buffer failed!");
      return nullptr;
    }
    // Zero-copy: the TextureClient keeps the MediaBuffer alive; the recycle
    // callback hands it back to us when the compositor is done with it.
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    GrallocTextureClientOGL* grallocClient =
      static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(aSource);
  }

  RefPtr<VideoData> data =
    VideoData::Create(mInfo.mVideo,
                      mImageContainer,
                      0, // Filled later by caller.
                      0, // Filled later by caller.
                      1, // No way to pass sample duration from muxer to
                         // OMX codec, so we hardcode the duration here.
                      textureClient,
                      false, // Filled later by caller.
                      -1,
                      aPicture);
  return data.forget();
}
// Turns the MediaBuffer currently held in mVideoBuffer into a VideoData
// sample returned through |v|. Two paths:
//  - gralloc path: the MediaBuffer is handed to a TextureClient (zero copy);
//    ReleaseVideoBuffer() is deliberately NOT called — the recycle callback
//    returns the buffer to the decoder later.
//  - software path: the raw output is (if needed) color-converted to planar
//    YUV420 and copied into a YCbCrBuffer; the MediaBuffer is released here.
//
// @param aStreamOffset  approximate byte position in the stream for this frame.
// @param v              out: the new sample; set to nullptr on entry.
// @return NS_OK, NS_ERROR_NOT_AVAILABLE for a spurious empty buffer, or
//         NS_ERROR_UNEXPECTED on missing buffer/time/data or conversion failure.
//
// NOTE(review): |data| is declared nsRefPtr while neighboring code uses
// RefPtr — presumably equivalent aliases here; consider unifying.
nsresult GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  // A frame without a presentation timestamp is unusable.
  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to ignore
    // quoted from Android's AwesomePlayer.cpp
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    // No sync-frame flag in the metadata: treat as a non-keyframe.
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {
    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it
    // was reported relative to the picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;

  if ((mVideoBuffer->graphicBuffer().get())) {
    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    // Zero-copy path: hand the MediaBuffer to the TextureClient. The recycle
    // callback gives it back to the decoder once the compositor releases it,
    // which is why ReleaseVideoBuffer() is not called on this branch.
    GrallocTextureClientOGL* grallocClient =
      static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);

    data = VideoData::Create(mInfo.mVideo, mImageContainer, aStreamOffset,
                             timeUs, duration, textureClient, keyFrame,
                             -1, picture);
  } else {
    // Software path: build the sample from the raw CPU-side buffer.
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Converts to OMX_COLOR_FormatYUV420Planar
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;
      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
            mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      // The converter output is tightly packed, so stride/slice height
      // collapse to the frame dimensions.
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

    // Compute plane pointers inside the contiguous I420 buffer:
    // Y plane, then U, then V, each rounded up for odd dimensions.
    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t *yuv420p_y = yuv420p_buffer;
    uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;

    // This is the approximate byte position in the stream.
    int64_t pos = aStreamOffset;

    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = yuv420p_y;
    b.mPlanes[0].mWidth = mFrameInfo.mWidth;
    b.mPlanes[0].mHeight = mFrameInfo.mHeight;
    b.mPlanes[0].mStride = stride;
    b.mPlanes[0].mOffset = 0;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = yuv420p_u;
    b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[1].mStride = (stride + 1) / 2;
    b.mPlanes[1].mOffset = 0;
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = yuv420p_v;
    b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[2].mStride = (stride + 1) / 2;
    b.mPlanes[2].mOffset = 0;
    b.mPlanes[2].mSkip = 0;

    data = VideoData::Create(mInfo.mVideo, mImageContainer, pos, timeUs,
                             1, // We don't know the duration.
                             b, keyFrame, -1, picture);
    // VideoData::Create copied the planes; the decoder buffer can go back now.
    ReleaseVideoBuffer();
  }

  data.forget(v);
  return NS_OK;
}
// Reads one decoded frame from mVideoSource into |aFrame|, optionally seeking
// first. Prefers the gralloc/TextureClient path (zero copy, manual MediaBuffer
// refcounting); falls back to copying via ToVideoFrame().
//
// @param aFrame         out: receives pixel data/metadata for the frame.
// @param aTimeUs        target time; frames earlier than this are flagged
//                       mShouldSkip when aKeyframeSkip is set.
// @param aKeyframeSkip  true while skipping forward to the seek target.
// @param aDoSeek        true to seek to the previous sync frame before reading.
// @return false on EOS or hard decode error; true on success or timeout
//         (timeout means "retry the read").
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aTimeUs,
                           bool aKeyframeSkip, bool aDoSeek)
{
  if (!mVideoSource.get())
    return false;

  // Drop the buffer held from the previous ReadVideo() call.
  ReleaseVideoBuffer();

  status_t err;

  if (aDoSeek) {
    {
      // Flush pending buffers and flag the seek under the lock so the
      // release path can tell a seek is in progress.
      Mutex::Autolock autoLock(mSeekLock);
      ReleaseAllPendingVideoBuffersLocked();
      mIsVideoSeeking = true;
    }
    MediaSource::ReadOptions options;
    options.setSeekTo(aTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
    err = mVideoSource->read(&mVideoBuffer, &options);
    {
      Mutex::Autolock autoLock(mSeekLock);
      mIsVideoSeeking = false;
      // Kick the release machinery with an empty entry now that the seek
      // is done — NOTE(review): presumably this flushes buffers queued
      // while mIsVideoSeeking was set; confirm against PostReleaseVideoBuffer.
      PostReleaseVideoBuffer(nullptr, FenceHandle());
    }
    // Only seek once, even if we recurse below on a format change.
    aDoSeek = false;
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  aFrame->mSize = 0;

  if (err == OK) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;
    size_t length = mVideoBuffer->range_length();

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs) ) {
      NS_WARNING("OMX decoder did not return frame time");
      return false;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    RefPtr<mozilla::layers::TextureClient> textureClient;
    if ((mVideoBuffer->graphicBuffer().get())) {
      textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
    }

    if (textureClient) {
      // Manually increment reference count to keep MediaBuffer alive
      // during TextureClient is in use.
      mVideoBuffer->add_ref();
      GrallocTextureClientOGL* grallocClient =
        static_cast<GrallocTextureClientOGL*>(textureClient.get());
      grallocClient->SetMediaBuffer(mVideoBuffer);
      // Set recycle callback for TextureClient
      textureClient->SetRecycleCallback(OmxDecoder::RecycleCallback, this);
      {
        Mutex::Autolock autoLock(mPendingVideoBuffersLock);
        // Store pending recycle TextureClient.
        // NOTE(review): "Texutre" is a typo in the member name declared
        // elsewhere; cannot be fixed from this function alone.
        MOZ_ASSERT(mPendingRecycleTexutreClients.find(textureClient) ==
                   mPendingRecycleTexutreClients.end());
        mPendingRecycleTexutreClients.insert(textureClient);
      }
      aFrame->mGraphicBuffer = textureClient;
      aFrame->mRotation = mVideoRotation;
      aFrame->mTimeUs = timeUs;
      aFrame->mKeyFrame = keyFrame;
      aFrame->Y.mWidth = mVideoWidth;
      aFrame->Y.mHeight = mVideoHeight;
      // Release to hold video buffer in OmxDecoder more.
      // MediaBuffer's ref count is changed from 2 to 1.
      ReleaseVideoBuffer();
    } else if (length > 0) {
      // Software fallback: copy the frame bytes out of the MediaBuffer.
      char *data = static_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
      if (unreadable) {
        LOG(PR_LOG_DEBUG, "video frame is unreadable");
      }
      if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
        return false;
      }
    }
    // Check if this frame is valid or not. If not, skip it.
    if ((aKeyframeSkip && timeUs < aTimeUs) || length == 0) {
      aFrame->mShouldSkip = true;
    }
  } else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    if (!SetVideoFormat()) {
      return false;
    } else {
      // Retry the read with the new format (aDoSeek already consumed above).
      return ReadVideo(aFrame, aTimeUs, aKeyframeSkip, aDoSeek);
    }
  } else if (err == ERROR_END_OF_STREAM) {
    return false;
  } else if (err == -ETIMEDOUT) {
    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo timed out, will retry");
    return true;
  } else {
    // UNKNOWN_ERROR is sometimes is used to mean "out of memory", but
    // regardless, don't keep trying to decode if the decoder doesn't want to.
    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo failed, err=%d", err);
    return false;
  }

  return true;
}