static void
GonkFrameBuilder(Image* aImage, void* aBuffer, uint32_t aWidth, uint32_t aHeight)
{
  /**
   * Cast the generic Image back to our platform-specific type and
   * populate it.
   */
  GrallocImage* videoImage = static_cast<GrallocImage*>(aImage);
  GrallocImage::GrallocData data;
  data.mGraphicBuffer = static_cast<layers::GraphicBufferLocked*>(aBuffer);
  data.mPicSize = gfxIntSize(aWidth, aHeight);
  videoImage->SetData(data);
}
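Builders like this are passed as callbacks so that platform-agnostic decoding code can populate a platform-specific Image without knowing its concrete type. As a sketch, assuming a hypothetical FrameBuilder typedef (the name is illustrative, not the actual Gecko type), a call site could look like:

// Hypothetical callback type; GonkFrameBuilder above matches this signature.
typedef void (*FrameBuilder)(Image* aImage, void* aBuffer,
                             uint32_t aWidth, uint32_t aHeight);

// Generic code would then invoke the builder without ever naming
// GrallocImage:
//   FrameBuilder builder = GonkFrameBuilder;
//   builder(image, lockedGraphicBuffer, width, height);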
/* static */ already_AddRefed<VideoData>
VideoData::Create(const VideoInfo& aInfo,
                  ImageContainer* aContainer,
                  int64_t aOffset,
                  int64_t aTime,
                  int64_t aDuration,
                  mozilla::layers::TextureClient* aBuffer,
                  bool aKeyframe,
                  int64_t aTimecode,
                  const IntRect& aPicture)
{
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  // The following situations could be triggered by invalid input.
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    NS_WARNING("Empty picture rect");
    return nullptr;
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || !yLimit.isValid()) {
    // The specified picture dimensions can't be contained inside the video
    // frame; we'd stomp memory if we tried to copy it. Fail.
    NS_WARNING("Overflowing picture rect");
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));
  v->mImage = aContainer->CreateImage(ImageFormat::GRALLOC_PLANAR_YCBCR);
  if (!v->mImage) {
    return nullptr;
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::GRALLOC_PLANAR_YCBCR,
               "Wrong format?");
  typedef mozilla::layers::GrallocImage GrallocImage;
  GrallocImage* videoImage = static_cast<GrallocImage*>(v->mImage.get());
  GrallocImage::GrallocData data;
  data.mPicSize = aPicture.Size();
  data.mGraphicBuffer = aBuffer;

  if (!videoImage->SetData(data)) {
    return nullptr;
  }

  return v.forget();
}
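The overflow guard above uses MFBT's checked arithmetic (mozilla/CheckedInt.h, where CheckedUint32 is defined): constructing or combining a checked value from anything that doesn't fit, including a negative offset, marks it invalid. A minimal standalone sketch of the same pattern (FitsInFrame is an illustrative helper, not part of the source):

#include "mozilla/CheckedInt.h"

using mozilla::CheckedUint32;

// Returns true when aX + aWidth fits in a uint32_t. A negative input or an
// arithmetic overflow poisons the checked value, so isValid() catches both.
static bool
FitsInFrame(int32_t aX, int32_t aWidth)
{
  CheckedUint32 limit = aX + CheckedUint32(aWidth);
  return limit.isValid();
}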
nsresult
OMXVideoEncoder::Encode(const Image* aImage, int aWidth, int aHeight,
                        int64_t aTimestamp, int aInputFlags)
{
  MOZ_ASSERT(mStarted, "Configure() should be called before Encode().");

  NS_ENSURE_TRUE(aWidth == mWidth && aHeight == mHeight && aTimestamp >= 0,
                 NS_ERROR_INVALID_ARG);

  status_t result;

  // Dequeue an input buffer.
  uint32_t index;
  result = mCodec->dequeueInputBuffer(&index, INPUT_BUFFER_TIMEOUT_US);
  NS_ENSURE_TRUE(result == OK, NS_ERROR_FAILURE);

  const sp<ABuffer>& inBuf = mInputBufs.itemAt(index);
  uint8_t* dst = inBuf->data();
  size_t dstSize = inBuf->capacity();

  size_t yLen = aWidth * aHeight;
  size_t uvLen = yLen / 2;
  // The buffer should be large enough to hold the input image data.
  MOZ_ASSERT(dstSize >= yLen + uvLen);

  // Exactly yLen + uvLen bytes of NV12 data are written, whether the frame
  // comes from aImage or is synthesized below, so queue only that much.
  dstSize = yLen + uvLen;
  inBuf->setRange(0, dstSize);

  if (!aImage) {
    // Generate a muted/black image directly in the buffer.
    // Fill the Y plane.
    memset(dst, 0x10, yLen);
    // Fill the interleaved UV plane.
    memset(dst + yLen, 0x80, uvLen);
  } else {
    Image* img = const_cast<Image*>(aImage);
    ImageFormat format = img->GetFormat();
    MOZ_ASSERT(aWidth == img->GetSize().width &&
               aHeight == img->GetSize().height);

    if (format == ImageFormat::GRALLOC_PLANAR_YCBCR) {
      // Get the graphic buffer pointer.
      void* imgPtr = nullptr;
      GrallocImage* nativeImage = static_cast<GrallocImage*>(img);
      SurfaceDescriptor handle = nativeImage->GetSurfaceDescriptor();
      SurfaceDescriptorGralloc gralloc = handle.get_SurfaceDescriptorGralloc();
      sp<GraphicBuffer> graphicBuffer = GrallocBufferActor::GetFrom(gralloc);
      graphicBuffer->lock(GraphicBuffer::USAGE_SW_READ_MASK, &imgPtr);
      uint8_t* src = static_cast<uint8_t*>(imgPtr);

      // Only NV21 is supported for now.
      MOZ_ASSERT(graphicBuffer->getPixelFormat() ==
                 HAL_PIXEL_FORMAT_YCrCb_420_SP);

      // Build a PlanarYCbCrData describing the NV21 buffer.
      PlanarYCbCrData nv21;
      // Y plane.
      nv21.mYChannel = src;
      nv21.mYSize.width = aWidth;
      nv21.mYSize.height = aHeight;
      nv21.mYStride = aWidth;
      nv21.mYSkip = 0;
      // Interleaved VU plane.
      nv21.mCrChannel = src + yLen;
      nv21.mCrSkip = 1;
      nv21.mCbChannel = nv21.mCrChannel + 1;
      nv21.mCbSkip = 1;
      nv21.mCbCrStride = aWidth;
      // 4:2:0 subsampling.
      nv21.mCbCrSize.width = aWidth / 2;
      nv21.mCbCrSize.height = aHeight / 2;

      ConvertPlanarYCbCrToNV12(&nv21, dst);

      graphicBuffer->unlock();
    } else if (format == ImageFormat::PLANAR_YCBCR) {
      ConvertPlanarYCbCrToNV12(static_cast<PlanarYCbCrImage*>(img)->GetData(),
                               dst);
    } else {
      // TODO: support RGB-to-YUV color conversion.
      NS_ERROR("Unsupported input image type.");
    }
  }

  // Queue this input buffer.
  result = mCodec->queueInputBuffer(index, 0, dstSize, aTimestamp,
                                    aInputFlags);

  return result == OK ? NS_OK : NS_ERROR_FAILURE;
}
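ConvertPlanarYCbCrToNV12 itself is not part of this listing. As a minimal sketch of what such a helper has to do, assuming only the PlanarYCbCrData fields populated above and a packed destination (the function name here is illustrative, not the real implementation):

#include <string.h>

// Illustrative sketch only. NV12 layout: a full-size Y plane followed by one
// half-height plane of interleaved Cb/Cr samples. Assumes
// mozilla::layers::PlanarYCbCrData as used above, with mYSkip == 0.
static void
ConvertPlanarYCbCrToNV12Sketch(const PlanarYCbCrData* aSource, uint8_t* aDst)
{
  // Copy the Y plane row by row, honoring the source stride.
  const uint8_t* y = aSource->mYChannel;
  for (int32_t row = 0; row < aSource->mYSize.height; row++) {
    memcpy(aDst, y, aSource->mYSize.width);
    aDst += aSource->mYSize.width;
    y += aSource->mYStride;
  }
  // Interleave Cb and Cr. The per-sample skips make this work for both
  // planar sources (skip == 0) and already-interleaved ones such as the
  // NV21 case above (skip == 1, advancing two bytes per sample).
  const uint8_t* cbRow = aSource->mCbChannel;
  const uint8_t* crRow = aSource->mCrChannel;
  for (int32_t row = 0; row < aSource->mCbCrSize.height; row++) {
    const uint8_t* cb = cbRow;
    const uint8_t* cr = crRow;
    for (int32_t col = 0; col < aSource->mCbCrSize.width; col++) {
      *aDst++ = *cb;  // NV12 stores Cb first...
      *aDst++ = *cr;  // ...then Cr, for each 2x2 block of pixels.
      cb += 1 + aSource->mCbSkip;
      cr += 1 + aSource->mCrSkip;
    }
    cbRow += aSource->mCbCrStride;
    crRow += aSource->mCbCrStride;
  }
}

For the NV21 gralloc path this amounts to swapping the chroma byte order (VUVU to UVUV); for a planar PLANAR_YCBCR image it interleaves the two separate chroma planes.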