Example #1
void
VideoTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            StreamTime aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
    if (mCanceled) {
        return;
    }

    const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);

    // Check and initialize parameters for codec encoder.
    if (!mInitialized) {
        mInitCounter++;
        TRACK_LOG(LogLevel::Debug, ("Init the video encoder %d times", mInitCounter));
        VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(video));
        while (!iter.IsEnded()) {
            VideoChunk chunk = *iter;
            if (!chunk.IsNull()) {
                gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
                gfx::IntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
                nsresult rv = Init(imgsize.width, imgsize.height,
                                   intrinsicSize.width, intrinsicSize.height,
                                   aGraph->GraphRate());
                if (NS_FAILED(rv)) {
                    LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
                    NotifyCancel();
                }
                break;
            }

            iter.Next();
        }

        mNotInitDuration += aQueuedMedia.GetDuration();
        if (!mInitialized &&
                (mNotInitDuration / aGraph->GraphRate() > INIT_FAILED_DURATION) &&
                mInitCounter > 1) {
            LOG("[VideoTrackEncoder]: Initialize failed for 30s.");
            NotifyEndOfStream();
            return;
        }
    }

    AppendVideoSegment(video);

    // The stream has stopped and reached the end of the track.
    if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
        LOG("[VideoTrackEncoder]: Receive TRACK_EVENT_ENDED.");
        NotifyEndOfStream();
    }

}
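A note on the timeout guard in this example: mNotInitDuration accumulates in graph ticks, so dividing by aGraph->GraphRate() yields whole seconds before comparing against INIT_FAILED_DURATION. A minimal standalone sketch of the same arithmetic; the 48 kHz graph rate and the tick counts are assumed values, and the 30-second threshold mirrors the "Initialize failed for 30s." log above:

#include <cstdint>

int main() {
  const int64_t graphRate = 48000;          // ticks per second (assumed)
  const int64_t INIT_FAILED_DURATION = 30;  // seconds (assumed)

  int64_t notInitTicks = 48000 * 31;        // 31 s of media with no init
  // Same guard as in the example: give up once more than 30 s have passed.
  bool giveUp = (notInitTicks / graphRate) > INIT_FAILED_DURATION; // true
  return giveUp ? 0 : 1;
}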
Example #2
void
VideoTrackEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                            TrackID aID,
                                            TrackRate aTrackRate,
                                            TrackTicks aTrackOffset,
                                            uint32_t aTrackEvents,
                                            const MediaSegment& aQueuedMedia)
{
  if (mCanceled) {
    return;
  }

  const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);

  // Check and initialize parameters for codec encoder.
  if (!mInitialized) {
#ifdef PR_LOGGING
    mVideoInitCounter++;
    TRACK_LOG(PR_LOG_DEBUG, ("Init the video encoder %d times", mVideoInitCounter));
#endif
    VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(video));
    while (!iter.IsEnded()) {
      VideoChunk chunk = *iter;
      if (!chunk.IsNull()) {
        gfx::IntSize imgsize = chunk.mFrame.GetImage()->GetSize();
        gfxIntSize intrinsicSize = chunk.mFrame.GetIntrinsicSize();
        nsresult rv = Init(imgsize.width, imgsize.height,
                           intrinsicSize.width, intrinsicSize.height,
                           aTrackRate);
        if (NS_FAILED(rv)) {
          LOG("[VideoTrackEncoder]: Fail to initialize the encoder!");
          NotifyCancel();
        }
        break;
      }

      iter.Next();
    }
  }

  AppendVideoSegment(video);

  // The stream has stopped and reached the end of the track.
  if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
    LOG("[VideoTrackEncoder]: Receive TRACK_EVENT_ENDED.");
    NotifyEndOfStream();
  }

}
Example #3
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
  if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
    PrepareMutedFrame();
  } else {
    layers::Image* img = aChunk.mFrame.GetImage();
    ImageFormat format = img->GetFormat();
    if (format != ImageFormat::PLANAR_YCBCR) {
      VP8LOG("Unsupported video format\n");
      return NS_ERROR_FAILURE;
    }

    // Cast away constness b/c some of the accessors are non-const
    layers::PlanarYCbCrImage* yuv =
      const_cast<layers::PlanarYCbCrImage*>(
        static_cast<const layers::PlanarYCbCrImage*>(img));
    // Big-time assumption here that this is all contiguous data coming
    // from getUserMedia or other sources.
    MOZ_ASSERT(yuv);
    const layers::PlanarYCbCrImage::Data *data = yuv->GetData();

    mVPXImageWrapper->planes[VPX_PLANE_Y] = data->mYChannel;
    mVPXImageWrapper->planes[VPX_PLANE_U] = data->mCbChannel;
    mVPXImageWrapper->planes[VPX_PLANE_V] = data->mCrChannel;
    mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
    mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
    mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
  }
  return NS_OK;
}
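The plane setup at the end of this example recurs in the VP8 encoder snippets below. As a minimal sketch, the wiring could be factored into a helper; WirePlanarYCbCr is a hypothetical name (not in the original source), the vpx_image_t fields are the standard libvpx ones used above, and the Data fields are Gecko's PlanarYCbCrImage::Data members:

#include "vpx/vpx_image.h"   // vpx_image_t, VPX_PLANE_* (libvpx)
#include "ImageContainer.h"  // mozilla::layers::PlanarYCbCrImage (Gecko)

// Hypothetical helper: points a vpx_image_t at the planes of a contiguous
// I420 PlanarYCbCrImage without copying. Both chroma planes share one stride.
static void WirePlanarYCbCr(vpx_image_t* aImg,
                            const mozilla::layers::PlanarYCbCrImage::Data* aData)
{
  aImg->planes[VPX_PLANE_Y] = aData->mYChannel;
  aImg->planes[VPX_PLANE_U] = aData->mCbChannel;
  aImg->planes[VPX_PLANE_V] = aData->mCrChannel;
  aImg->stride[VPX_PLANE_Y] = aData->mYStride;
  aImg->stride[VPX_PLANE_U] = aData->mCbCrStride;
  aImg->stride[VPX_PLANE_V] = aData->mCbCrStride;
}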
Example #4
nsresult
VideoTrackEncoder::AppendVideoSegment(const VideoSegment& aSegment)
{
  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  // Append all video segments from MediaStreamGraph, including null and
  // non-null frames.
  VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(aSegment));
  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;
    mTotalFrameDuration += chunk.GetDuration();
    mLastFrameDuration += chunk.GetDuration();
    // Send only unique video frames for encoding. If we have received the
    // same video chunk for more than one second, force it into the encoder
    // anyway.
    if ((mLastFrame != chunk.mFrame) ||
        (mLastFrameDuration >= mTrackRate)) {
      RefPtr<layers::Image> image = chunk.mFrame.GetImage();
      // Because we may get chunks with a null image (due to input blocking),
      // accumulate duration and give it to the next frame that arrives.
      // Canonically incorrect - the duration should go to the previous frame
      // - but that would require delaying until the next frame arrives.
      // Best would be to do like OMXEncoder and pass an effective timestamp
      // in with each frame.
      if (image) {
        mRawSegment.AppendFrame(image.forget(),
                                mLastFrameDuration,
                                chunk.mFrame.GetIntrinsicSize(),
                                PRINCIPAL_HANDLE_NONE,
                                chunk.mFrame.GetForceBlack());
        mLastFrameDuration = 0;
      }
    }
    mLastFrame.TakeFrom(&chunk.mFrame);
    iter.Next();
  }

  if (mRawSegment.GetDuration() > 0) {
    mReentrantMonitor.NotifyAll();
  }

  return NS_OK;
}
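The dedup rule here forwards a frame when it differs from the previous one, or when the same frame has persisted for a full second of track time (mLastFrameDuration >= mTrackRate). A small worked sketch of that second condition, with an assumed 90 kHz track rate:

#include <cstdint>

int main() {
  const int64_t trackRate = 90000;  // ticks per second (assumed 90 kHz)
  int64_t lastFrameDuration = 0;

  lastFrameDuration += 45000;       // 0.5 s of an unchanged frame
  bool resend = lastFrameDuration >= trackRate;  // false: still suppressed

  lastFrameDuration += 45000;       // another 0.5 s of the same frame
  resend = lastFrameDuration >= trackRate;       // true: force-send it now
  return resend ? 0 : 1;
}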
Example #5
nsresult
VideoTrackEncoder::AppendVideoSegment(const VideoSegment& aSegment)
{
  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  // Append all video segments from MediaStreamGraph, including null and
  // non-null frames.
  VideoSegment::ChunkIterator iter(const_cast<VideoSegment&>(aSegment));
  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;
    nsRefPtr<layers::Image> image = chunk.mFrame.GetImage();
    mRawSegment.AppendFrame(image.forget(), chunk.GetDuration(),
                            chunk.mFrame.GetIntrinsicSize().ToIntSize());
    iter.Next();
  }

  if (mRawSegment.GetDuration() > 0) {
    mReentrantMonitor.NotifyAll();
  }

  return NS_OK;
}
Example #6
nsresult
OmxVideoTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData)
{
  VideoSegment segment;
  {
    // Move all the samples from mRawSegment to segment. We only hold the
    // monitor in this block.
    ReentrantMonitorAutoEnter mon(mReentrantMonitor);

    // Wait until canceled, or until the encoder is initialized and there is
    // either raw data to consume or an end-of-stream to signal.
    while (!mCanceled && (!mInitialized ||
          (mRawSegment.GetDuration() == 0 && !mEndOfStream))) {
      mReentrantMonitor.Wait();
    }

    if (mCanceled || mEncodingComplete) {
      return NS_ERROR_FAILURE;
    }

    segment.AppendFrom(&mRawSegment);
  }

  nsresult rv;
  // Start queuing raw frames to the input buffers of OMXCodecWrapper.
  VideoSegment::ChunkIterator iter(segment);
  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;

    // Send only the unique video frames to OMXCodecWrapper.
    if (mLastFrame != chunk.mFrame) {
      uint64_t totalDurationUs = mTotalFrameDuration * USECS_PER_S / mTrackRate;
      layers::Image* img = (chunk.IsNull() || chunk.mFrame.GetForceBlack()) ?
                           nullptr : chunk.mFrame.GetImage();
      rv = mEncoder->Encode(img, mFrameWidth, mFrameHeight, totalDurationUs);
      NS_ENSURE_SUCCESS(rv, rv);
    }

    mLastFrame.TakeFrom(&chunk.mFrame);
    mTotalFrameDuration += chunk.GetDuration();

    iter.Next();
  }

  // Send the EOS signal to OMXCodecWrapper.
  if (mEndOfStream && iter.IsEnded() && !mEosSetInEncoder) {
    uint64_t totalDurationUs = mTotalFrameDuration * USECS_PER_S / mTrackRate;
    layers::Image* img = (!mLastFrame.GetImage() || mLastFrame.GetForceBlack())
                         ? nullptr : mLastFrame.GetImage();
    rv = mEncoder->Encode(img, mFrameWidth, mFrameHeight, totalDurationUs,
                          OMXCodecWrapper::BUFFER_EOS);
    NS_ENSURE_SUCCESS(rv, rv);

    // Keep sending EOS signal until OMXVideoEncoder gets it.
    mEosSetInEncoder = true;
  }

  // Dequeue an encoded frame from the output buffers of OMXCodecWrapper.
  nsTArray<uint8_t> buffer;
  int outFlags = 0;
  int64_t outTimeStampUs = 0;
  rv = mEncoder->GetNextEncodedFrame(&buffer, &outTimeStampUs, &outFlags,
                                     GET_ENCODED_VIDEO_FRAME_TIMEOUT);
  NS_ENSURE_SUCCESS(rv, rv);
  if (!buffer.IsEmpty()) {
    nsRefPtr<EncodedFrame> videoData = new EncodedFrame();
    if (outFlags & OMXCodecWrapper::BUFFER_CODEC_CONFIG) {
      videoData->SetFrameType(EncodedFrame::AVC_CSD);
    } else {
      videoData->SetFrameType((outFlags & OMXCodecWrapper::BUFFER_SYNC_FRAME) ?
                              EncodedFrame::AVC_I_FRAME : EncodedFrame::AVC_P_FRAME);
    }
    videoData->SwapInFrameData(buffer);
    videoData->SetTimeStamp(outTimeStampUs);
    aData.AppendEncodedFrame(videoData);
  }

  if (outFlags & OMXCodecWrapper::BUFFER_EOS) {
    mEncodingComplete = true;
    OMX_LOG("Done encoding video.");
  }

  return NS_OK;
}
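The timestamp handed to Encode() above converts accumulated track ticks to microseconds as ticks * USECS_PER_S / mTrackRate. A worked sketch with assumed values:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t USECS_PER_S = 1000000;
  const int64_t trackRate = 90000;   // assumed 90 kHz media time base
  int64_t totalFrameDuration = 4500; // ticks accumulated so far

  // Multiply before dividing to avoid truncating sub-second durations.
  int64_t totalDurationUs = totalFrameDuration * USECS_PER_S / trackRate;
  assert(totalDurationUs == 50000);  // 4500 / 90000 s == 50 ms
  return 0;
}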
Example #7
void
CaptureTask::SetCurrentFrames(const VideoSegment& aSegment)
{
  if (mImageGrabbedOrTrackEnd) {
    return;
  }

  // Completion callback for the encoder; it is invoked on the main thread.
  class EncodeComplete : public dom::EncodeCompleteCallback
  {
  public:
    explicit EncodeComplete(CaptureTask* aTask) : mTask(aTask) {}

    nsresult ReceiveBlob(already_AddRefed<dom::Blob> aBlob) override
    {
      RefPtr<dom::Blob> blob(aBlob);
      mTask->TaskComplete(blob.forget(), NS_OK);
      mTask = nullptr;
      return NS_OK;
    }

  protected:
    RefPtr<CaptureTask> mTask;
  };

  VideoSegment::ConstChunkIterator iter(aSegment);

  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;

    // Extract the first valid video frame.
    VideoFrame frame;
    if (!chunk.IsNull()) {
      RefPtr<layers::Image> image;
      if (chunk.mFrame.GetForceBlack()) {
        // Create a black image.
        image = VideoFrame::CreateBlackImage(chunk.mFrame.GetIntrinsicSize());
      } else {
        image = chunk.mFrame.GetImage();
      }
      MOZ_ASSERT(image);
      mImageGrabbedOrTrackEnd = true;

      // Encode image.
      nsresult rv;
      nsAutoString type(NS_LITERAL_STRING("image/jpeg"));
      nsAutoString options;
      rv = dom::ImageEncoder::ExtractDataFromLayersImageAsync(
                                type,
                                options,
                                false,
                                image,
                                new EncodeComplete(this));
      if (NS_FAILED(rv)) {
        PostTrackEndEvent();
      }
      return;
    }
    iter.Next();
  }
}
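A pattern worth noting above: EncodeComplete holds a strong RefPtr to its CaptureTask and nulls it out after delivering the blob, so the task stays alive exactly until the async encode finishes. A minimal standalone sketch of the same ownership idea; std::shared_ptr stands in for the Gecko refcounting types, which is an assumption for illustration, not the original API:

#include <iostream>
#include <memory>
#include <string>
#include <utility>

struct Task {
  void Complete(std::string aResult) { std::cout << aResult << "\n"; }
};

// The callback keeps the task alive until the result arrives, then releases it.
class Completion {
public:
  explicit Completion(std::shared_ptr<Task> aTask) : mTask(std::move(aTask)) {}
  void Receive(std::string aResult) {
    mTask->Complete(std::move(aResult));
    mTask.reset();  // drop the strong reference: the handoff is done
  }
private:
  std::shared_ptr<Task> mTask;
};

int main() {
  Completion cb(std::make_shared<Task>());
  cb.Receive("encoded");  // in the example, this arrives asynchronously
  return 0;
}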
Example #8
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
  nsRefPtr<Image> img;
  if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
    if (!mMuteFrame) {
      mMuteFrame = VideoFrame::CreateBlackImage(gfxIntSize(mFrameWidth, mFrameHeight));
      MOZ_ASSERT(mMuteFrame);
    }
    img = mMuteFrame;
  } else {
    img = aChunk.mFrame.GetImage();
  }

  ImageFormat format = img->GetFormat();
  if (format != ImageFormat::PLANAR_YCBCR) {
    VP8LOG("Unsupported video format\n");
    return NS_ERROR_FAILURE;
  }

  // Cast away constness b/c some of the accessors are non-const
  PlanarYCbCrImage* yuv =
    const_cast<PlanarYCbCrImage*>(static_cast<const PlanarYCbCrImage*>(img.get()));
  // Big-time assumption here that this is all contiguous data coming
  // from getUserMedia or other sources.
  MOZ_ASSERT(yuv);
  if (!yuv->IsValid()) {
    NS_WARNING("PlanarYCbCrImage is not valid");
    return NS_ERROR_FAILURE;
  }
  const PlanarYCbCrImage::Data *data = yuv->GetData();

  if (isYUV420(data) && !data->mCbSkip) { // 420 planar
    mVPXImageWrapper->planes[VPX_PLANE_Y] = data->mYChannel;
    mVPXImageWrapper->planes[VPX_PLANE_U] = data->mCbChannel;
    mVPXImageWrapper->planes[VPX_PLANE_V] = data->mCrChannel;
    mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
    mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
    mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;
  } else {
    uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
    uint32_t halfWidth = (mFrameWidth + 1) / 2;
    uint32_t halfHeight = (mFrameHeight + 1) / 2;
    uint32_t uvPlaneSize = halfWidth * halfHeight;
    if (mI420Frame.IsEmpty()) {
      mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
    }

    MOZ_ASSERT(mI420Frame.Length() >= (yPlaneSize + uvPlaneSize * 2));
    uint8_t *y = mI420Frame.Elements();
    uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
    uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;

    if (isYUV420(data) && data->mCbSkip) {
      // If mCbSkip is set, we assume it's nv12 or nv21.
      if (data->mCbChannel < data->mCrChannel) { // nv12
        libyuv::NV12ToI420(data->mYChannel, data->mYStride,
                           data->mCbChannel, data->mCbCrStride,
                           y, mFrameWidth,
                           cb, halfWidth,
                           cr, halfWidth,
                           mFrameWidth, mFrameHeight);
      } else { // nv21
        libyuv::NV21ToI420(data->mYChannel, data->mYStride,
                           data->mCrChannel, data->mCbCrStride,
                           y, mFrameWidth,
                           cb, halfWidth,
                           cr, halfWidth,
                           mFrameWidth, mFrameHeight);
      }
    } else if (isYUV444(data) && !data->mCbSkip) {
      libyuv::I444ToI420(data->mYChannel, data->mYStride,
                         data->mCbChannel, data->mCbCrStride,
                         data->mCrChannel, data->mCbCrStride,
                         y, mFrameWidth,
                         cb, halfWidth,
                         cr, halfWidth,
                         mFrameWidth, mFrameHeight);
    } else if (isYUV422(data) && !data->mCbSkip) {
      libyuv::I422ToI420(data->mYChannel, data->mYStride,
                         data->mCbChannel, data->mCbCrStride,
                         data->mCrChannel, data->mCbCrStride,
                         y, mFrameWidth,
                         cb, halfWidth,
                         cr, halfWidth,
                         mFrameWidth, mFrameHeight);
    } else {
      VP8LOG("Unsupported planar format\n");
      return NS_ERROR_NOT_IMPLEMENTED;
    }

    mVPXImageWrapper->planes[VPX_PLANE_Y] = y;
    mVPXImageWrapper->planes[VPX_PLANE_U] = cb;
    mVPXImageWrapper->planes[VPX_PLANE_V] = cr;
    mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
    mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
    mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;
  }

  return NS_OK;
}
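This example calls isYUV420, isYUV422, and isYUV444 without showing them. A minimal standalone sketch of what such predicates might check, comparing the chroma plane size against the luma plane size; the stand-in structs mirror the mYSize/mCbCrSize fields of PlanarYCbCrImage::Data, and the exact definitions in the original source may differ:

// Stand-ins for the relevant fields of layers::PlanarYCbCrImage::Data.
struct IntSize { int width; int height; };
struct Data { IntSize mYSize; IntSize mCbCrSize; };

// Assumed definitions: classify chroma subsampling by plane-size ratios,
// rounding up for odd luma dimensions.
static bool isYUV420(const Data* d) {
  return d->mCbCrSize.width == (d->mYSize.width + 1) / 2 &&
         d->mCbCrSize.height == (d->mYSize.height + 1) / 2;
}
static bool isYUV422(const Data* d) {
  return d->mCbCrSize.width == (d->mYSize.width + 1) / 2 &&
         d->mCbCrSize.height == d->mYSize.height;
}
static bool isYUV444(const Data* d) {
  return d->mCbCrSize.width == d->mYSize.width &&
         d->mCbCrSize.height == d->mYSize.height;
}

int main() {
  Data d{{640, 480}, {320, 240}};
  return isYUV420(&d) ? 0 : 1;  // 320x240 chroma on 640x480 luma: 4:2:0
}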
Example #9
void VideoFrameContainer::SetCurrentFrames(const VideoSegment& aSegment)
{
  if (aSegment.IsEmpty()) {
    return;
  }

  MutexAutoLock lock(mMutex);

  // Collect any new frames produced in this iteration.
  AutoTArray<ImageContainer::NonOwningImage,4> newImages;
  PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;

  VideoSegment::ConstChunkIterator iter(aSegment);
  while (!iter.IsEnded()) {
    VideoChunk chunk = *iter;

    const VideoFrame* frame = &chunk.mFrame;
    if (*frame == mLastPlayedVideoFrame) {
      iter.Next();
      continue;
    }

    Image* image = frame->GetImage();
    CONTAINER_LOG(LogLevel::Verbose,
                  ("VideoFrameContainer %p writing video frame %p (%d x %d)",
                  this, image, frame->GetIntrinsicSize().width,
                  frame->GetIntrinsicSize().height));

    if (frame->GetForceBlack()) {
      if (!mBlackImage) {
        mBlackImage = GetImageContainer()->CreatePlanarYCbCrImage();
        if (mBlackImage) {
          // Sets the image to a single black pixel, which will be scaled to
          // fill the rendered size.
          SetImageToBlackPixel(mBlackImage->AsPlanarYCbCrImage());
        }
      }
      if (mBlackImage) {
        image = mBlackImage;
      }
    }
    // Don't append a null image to newImages.
    if (!image) {
      iter.Next();
      continue;
    }
    newImages.AppendElement(ImageContainer::NonOwningImage(image, chunk.mTimeStamp));

    lastPrincipalHandle = chunk.GetPrincipalHandle();

    mLastPlayedVideoFrame = *frame;
    iter.Next();
  }

  // Don't update if there are no changes.
  if (newImages.IsEmpty()) {
    return;
  }

  AutoTArray<ImageContainer::NonOwningImage,4> images;

  bool principalHandleChanged =
    lastPrincipalHandle != PRINCIPAL_HANDLE_NONE &&
    lastPrincipalHandle != GetLastPrincipalHandleLocked();

  // Add the frames from this iteration.
  for (auto& image : newImages) {
    image.mFrameID = NewFrameID();
    images.AppendElement(image);
  }

  if (principalHandleChanged) {
    UpdatePrincipalHandleForFrameIDLocked(lastPrincipalHandle,
                                          newImages.LastElement().mFrameID);
  }

  SetCurrentFramesLocked(mLastPlayedVideoFrame.GetIntrinsicSize(), images);
  nsCOMPtr<nsIRunnable> event =
    new VideoFrameContainerInvalidateRunnable(this);
  mMainThread->Dispatch(event.forget());

  images.ClearAndRetainStorage();
}
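The loop above skips any chunk whose frame equals mLastPlayedVideoFrame, so the compositor only receives images that actually changed. A reduced standalone sketch of that dedup-and-collect pass, with hypothetical stand-in types for VideoFrame and the collected image list:

#include <vector>

// Hypothetical stand-in for VideoFrame; equality models frame identity.
struct Frame {
  int id = -1;
  bool operator==(const Frame& aOther) const { return id == aOther.id; }
};

int main() {
  std::vector<Frame> segment = {{1}, {1}, {2}, {2}, {3}};
  Frame lastPlayed;               // starts as "no frame played yet"
  std::vector<Frame> newImages;   // frames to hand to the compositor

  for (const Frame& f : segment) {
    if (f == lastPlayed) {
      continue;                   // unchanged frame: nothing to composite
    }
    newImages.push_back(f);
    lastPlayed = f;
  }
  // newImages now holds {1, 2, 3}: one entry per distinct consecutive frame.
  return newImages.size() == 3 ? 0 : 1;
}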
Example #10
nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
{
  RefPtr<Image> img;
  if (aChunk.mFrame.GetForceBlack() || aChunk.IsNull()) {
    if (!mMuteFrame) {
      mMuteFrame = VideoFrame::CreateBlackImage(gfx::IntSize(mFrameWidth, mFrameHeight));
      MOZ_ASSERT(mMuteFrame);
    }
    img = mMuteFrame;
  } else {
    img = aChunk.mFrame.GetImage();
  }

  if (img->GetSize() != IntSize(mFrameWidth, mFrameHeight)) {
    VP8LOG("Dynamic resolution changes (was %dx%d, now %dx%d) are unsupported\n",
           mFrameWidth, mFrameHeight, img->GetSize().width, img->GetSize().height);
    return NS_ERROR_FAILURE;
  }

  ImageFormat format = img->GetFormat();
  if (format == ImageFormat::PLANAR_YCBCR) {
    PlanarYCbCrImage* yuv = static_cast<PlanarYCbCrImage *>(img.get());

    MOZ_RELEASE_ASSERT(yuv);
    if (!yuv->IsValid()) {
      NS_WARNING("PlanarYCbCrImage is not valid");
      return NS_ERROR_FAILURE;
    }
    const PlanarYCbCrImage::Data *data = yuv->GetData();

    if (isYUV420(data) && !data->mCbSkip) {
      // 420 planar, no need for conversions
      mVPXImageWrapper->planes[VPX_PLANE_Y] = data->mYChannel;
      mVPXImageWrapper->planes[VPX_PLANE_U] = data->mCbChannel;
      mVPXImageWrapper->planes[VPX_PLANE_V] = data->mCrChannel;
      mVPXImageWrapper->stride[VPX_PLANE_Y] = data->mYStride;
      mVPXImageWrapper->stride[VPX_PLANE_U] = data->mCbCrStride;
      mVPXImageWrapper->stride[VPX_PLANE_V] = data->mCbCrStride;

      return NS_OK;
    }
  }

  // Not 420 planar, have to convert
  uint32_t yPlaneSize = mFrameWidth * mFrameHeight;
  uint32_t halfWidth = (mFrameWidth + 1) / 2;
  uint32_t halfHeight = (mFrameHeight + 1) / 2;
  uint32_t uvPlaneSize = halfWidth * halfHeight;

  if (mI420Frame.IsEmpty()) {
    mI420Frame.SetLength(yPlaneSize + uvPlaneSize * 2);
  }

  uint8_t *y = mI420Frame.Elements();
  uint8_t *cb = mI420Frame.Elements() + yPlaneSize;
  uint8_t *cr = mI420Frame.Elements() + yPlaneSize + uvPlaneSize;

  if (format == ImageFormat::PLANAR_YCBCR) {
    PlanarYCbCrImage* yuv = static_cast<PlanarYCbCrImage *>(img.get());

    MOZ_RELEASE_ASSERT(yuv);
    if (!yuv->IsValid()) {
      NS_WARNING("PlanarYCbCrImage is not valid");
      return NS_ERROR_FAILURE;
    }
    const PlanarYCbCrImage::Data *data = yuv->GetData();

    int rv;
    std::string yuvFormat;
    if (isYUV420(data) && data->mCbSkip) {
      // If mCbSkip is set, we assume it's nv12 or nv21.
      if (data->mCbChannel < data->mCrChannel) { // nv12
        rv = libyuv::NV12ToI420(data->mYChannel, data->mYStride,
                                data->mCbChannel, data->mCbCrStride,
                                y, mFrameWidth,
                                cb, halfWidth,
                                cr, halfWidth,
                                mFrameWidth, mFrameHeight);
        yuvFormat = "NV12";
      } else { // nv21
        rv = libyuv::NV21ToI420(data->mYChannel, data->mYStride,
                                data->mCrChannel, data->mCbCrStride,
                                y, mFrameWidth,
                                cb, halfWidth,
                                cr, halfWidth,
                                mFrameWidth, mFrameHeight);
        yuvFormat = "NV21";
      }
    } else if (isYUV444(data) && !data->mCbSkip) {
      rv = libyuv::I444ToI420(data->mYChannel, data->mYStride,
                              data->mCbChannel, data->mCbCrStride,
                              data->mCrChannel, data->mCbCrStride,
                              y, mFrameWidth,
                              cb, halfWidth,
                              cr, halfWidth,
                              mFrameWidth, mFrameHeight);
      yuvFormat = "I444";
    } else if (isYUV422(data) && !data->mCbSkip) {
      rv = libyuv::I422ToI420(data->mYChannel, data->mYStride,
                              data->mCbChannel, data->mCbCrStride,
                              data->mCrChannel, data->mCbCrStride,
                              y, mFrameWidth,
                              cb, halfWidth,
                              cr, halfWidth,
                              mFrameWidth, mFrameHeight);
      yuvFormat = "I422";
    } else {
      VP8LOG("Unsupported planar format\n");
      NS_ASSERTION(false, "Unsupported planar format");
      return NS_ERROR_NOT_IMPLEMENTED;
    }

    if (rv != 0) {
      VP8LOG("Converting an %s frame to I420 failed\n", yuvFormat.c_str());
      return NS_ERROR_FAILURE;
    }

    VP8LOG("Converted an %s frame to I420\n");
  } else {
    // Not YCbCr at all. Try to get access to the raw data and convert.

    RefPtr<SourceSurface> surf = img->GetAsSourceSurface();
    if (!surf) {
      VP8LOG("Getting surface from %s image failed\n", Stringify(format).c_str());
      return NS_ERROR_FAILURE;
    }

    RefPtr<DataSourceSurface> data = surf->GetDataSurface();
    if (!data) {
      VP8LOG("Getting data surface from %s image with %s (%s) surface failed\n",
             Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
             Stringify(surf->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    DataSourceSurface::ScopedMap map(data, DataSourceSurface::READ);
    if (!map.IsMapped()) {
      VP8LOG("Reading DataSourceSurface from %s image with %s (%s) surface failed\n",
             Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),
             Stringify(surf->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    int rv;
    switch (surf->GetFormat()) {
      case SurfaceFormat::B8G8R8A8:
      case SurfaceFormat::B8G8R8X8:
        rv = libyuv::ARGBToI420(static_cast<uint8*>(map.GetData()),
                                map.GetStride(),
                                y, mFrameWidth,
                                cb, halfWidth,
                                cr, halfWidth,
                                mFrameWidth, mFrameHeight);
        break;
      case SurfaceFormat::R5G6B5_UINT16:
        rv = libyuv::RGB565ToI420(static_cast<uint8*>(map.GetData()),
                                  map.GetStride(),
                                  y, mFrameWidth,
                                  cb, halfWidth,
                                  cr, halfWidth,
                                  mFrameWidth, mFrameHeight);
        break;
      default:
        VP8LOG("Unsupported SourceSurface format %s\n",
               Stringify(surf->GetFormat()).c_str());
        NS_ASSERTION(false, "Unsupported SourceSurface format");
        return NS_ERROR_NOT_IMPLEMENTED;
    }

    if (rv != 0) {
      VP8LOG("%s to I420 conversion failed\n",
             Stringify(surf->GetFormat()).c_str());
      return NS_ERROR_FAILURE;
    }

    VP8LOG("Converted a %s frame to I420\n",
           Stringify(surf->GetFormat()).c_str());
  }

  mVPXImageWrapper->planes[VPX_PLANE_Y] = y;
  mVPXImageWrapper->planes[VPX_PLANE_U] = cb;
  mVPXImageWrapper->planes[VPX_PLANE_V] = cr;
  mVPXImageWrapper->stride[VPX_PLANE_Y] = mFrameWidth;
  mVPXImageWrapper->stride[VPX_PLANE_U] = halfWidth;
  mVPXImageWrapper->stride[VPX_PLANE_V] = halfWidth;

  return NS_OK;
}
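One detail worth calling out in the conversion path above: the (mFrameWidth + 1) / 2 rounding sizes the I420 chroma planes correctly when the frame has odd dimensions. A worked sketch with assumed values:

#include <cassert>

int main() {
  // Assumed odd frame size; I420 chroma planes are half-size, rounded up.
  int width = 641, height = 481;
  int halfWidth = (width + 1) / 2;    // 321, not 320
  int halfHeight = (height + 1) / 2;  // 241
  int i420Bytes = width * height + 2 * halfWidth * halfHeight; // Y + Cb + Cr
  assert(i420Bytes == 463043);        // 308321 + 2 * 77361
  return 0;
}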