nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoted from Android's AwesomePlayer.cpp).
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {
    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it was reported
    // relative to the picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;

  if (mVideoBuffer->graphicBuffer().get()) {
    textureClient =
      mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    // Zero-copy path: wrap the decoder's gralloc buffer in a TextureClient and
    // return the MediaBuffer to the decoder when the texture is recycled.
    GrallocTextureClientOGL* grallocClient =
      static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);

    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             aStreamOffset,
                             timeUs,
                             duration,
                             textureClient,
                             keyFrame,
                             -1,
                             picture);
  } else {
    // Software path: copy the decoded planes out of the MediaBuffer.
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t* yuv420p_buffer = (uint8_t*)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Convert to OMX_COLOR_FormatYUV420Planar if the decoder output is in a
    // different color format.
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;

      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
          mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

    // I420 layout: a full-resolution Y plane followed by quarter-resolution
    // U and V planes.
    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t* yuv420p_y = yuv420p_buffer;
    uint8_t* yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t* yuv420p_v = yuv420p_u + yuv420p_u_size;

    // This is the approximate byte position in the stream.
    int64_t pos = aStreamOffset;

    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = yuv420p_y;
    b.mPlanes[0].mWidth = mFrameInfo.mWidth;
    b.mPlanes[0].mHeight = mFrameInfo.mHeight;
    b.mPlanes[0].mStride = stride;
    b.mPlanes[0].mOffset = 0;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = yuv420p_u;
    b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[1].mStride = (stride + 1) / 2;
    b.mPlanes[1].mOffset = 0;
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = yuv420p_v;
    b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[2].mStride = (stride + 1) / 2;
    b.mPlanes[2].mOffset = 0;
    b.mPlanes[2].mSkip = 0;

    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             pos,
                             timeUs,
                             1, // We don't know the duration.
                             b,
                             keyFrame,
                             -1,
                             picture);
    ReleaseVideoBuffer();
  }

  data.forget(v);
  return NS_OK;
}
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromDataBuffer(MediaBuffer* aSource,
                                                       gfx::IntRect& aPicture)
{
  if (!aSource->data()) {
    GVDM_LOG("No data in Video Buffer!");
    return nullptr;
  }
  uint8_t* yuv420p_buffer = (uint8_t*)aSource->data();
  int32_t stride = mFrameInfo.mStride;
  int32_t slice_height = mFrameInfo.mSliceHeight;

  // Converts to OMX_COLOR_FormatYUV420Planar
  if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
    ARect crop;
    crop.top = 0;
    crop.bottom = mFrameInfo.mHeight;
    crop.left = 0;
    crop.right = mFrameInfo.mWidth;

    yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
    if (mColorConverter.convertDecoderOutputToI420(aSource->data(),
        mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
      GVDM_LOG("Color conversion failed!");
      return nullptr;
    }
    stride = mFrameInfo.mWidth;
    slice_height = mFrameInfo.mHeight;
  }

  size_t yuv420p_y_size = stride * slice_height;
  size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
  uint8_t* yuv420p_y = yuv420p_buffer;
  uint8_t* yuv420p_u = yuv420p_y + yuv420p_y_size;
  uint8_t* yuv420p_v = yuv420p_u + yuv420p_u_size;

  VideoData::YCbCrBuffer b;
  b.mPlanes[0].mData = yuv420p_y;
  b.mPlanes[0].mWidth = mFrameInfo.mWidth;
  b.mPlanes[0].mHeight = mFrameInfo.mHeight;
  b.mPlanes[0].mStride = stride;
  b.mPlanes[0].mOffset = 0;
  b.mPlanes[0].mSkip = 0;

  b.mPlanes[1].mData = yuv420p_u;
  b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
  b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
  b.mPlanes[1].mStride = (stride + 1) / 2;
  b.mPlanes[1].mOffset = 0;
  b.mPlanes[1].mSkip = 0;

  b.mPlanes[2].mData = yuv420p_v;
  b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
  b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
  b.mPlanes[2].mStride = (stride + 1) / 2;
  b.mPlanes[2].mOffset = 0;
  b.mPlanes[2].mSkip = 0;

  RefPtr<VideoData> data =
    VideoData::Create(mInfo.mVideo,
                      mImageContainer,
                      0, // Filled later by caller.
                      0, // Filled later by caller.
                      1, // We don't know the duration.
                      b,
                      0, // Filled later by caller.
                      -1,
                      aPicture);
  return data.forget();
}
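// Minimal caller-side sketch (illustrative only, not part of this file): the
// offset, timestamp and keyframe arguments above are passed as 0 and are
// expected to be filled in by the caller on the returned frame. The member
// names used below (mOffset, mTime, mKeyframe) are an assumption about the
// surrounding Gecko MediaData fields, shown only to make the "filled later
// by caller" contract concrete:
//
//   RefPtr<VideoData> frame =
//     CreateVideoDataFromDataBuffer(mVideoBuffer, picture);
//   if (frame) {
//     frame->mOffset = aStreamOffset; // approximate byte position in stream
//     frame->mTime = timeUs;          // presentation time from kKeyTime
//     frame->mKeyframe = keyFrame;    // sync-frame flag from kKeyIsSyncFrame
//   }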