void
GonkVideoDecoderManager::codecReserved()
{
  GVDM_LOG("codecReserved");
  sp<AMessage> format = new AMessage;
  sp<Surface> surface;
  status_t rv = OK;
  // Fixed values
  GVDM_LOG("Configure video mime type: %s, widht:%d, height:%d", mMimeType.get(), mVideoWidth, mVideoHeight);
  format->setString("mime", mMimeType.get());
  format->setInt32("width", mVideoWidth);
  format->setInt32("height", mVideoHeight);
  if (mNativeWindow != nullptr) {
    surface = new Surface(mNativeWindow->getBufferQueue());
  }
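  // Handing a Surface to configure() lets MediaCodec return decoded frames
  // as graphic buffers instead of copying them into CPU memory.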
  mDecoder->configure(format, surface, nullptr, 0);
  mDecoder->Prepare();

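  // MPEG-4 Part 2 ("video/mp4v-es") streams need their codec-specific data
  // submitted as a CODECCONFIG buffer before the first frame.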
  if (mMimeType.EqualsLiteral("video/mp4v-es")) {
    rv = mDecoder->Input(mCodecSpecificData->Elements(),
                         mCodecSpecificData->Length(), 0,
                         android::MediaCodec::BUFFER_FLAG_CODECCONFIG);
  }

  if (rv != OK) {
    GVDM_LOG("Failed to configure codec!!!!");
    mReaderCallback->Error();
  }
}
RefPtr<MediaDataDecoder::InitPromise>
GonkVideoDecoderManager::Init()
{
  nsIntSize displaySize(mDisplayWidth, mDisplayHeight);
  nsIntRect pictureRect(0, 0, mVideoWidth, mVideoHeight);

  uint32_t maxWidth, maxHeight;
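  // Board configuration may override the default hardware decoder limits via
  // system properties; -1 selects the built-in default.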
  char propValue[PROPERTY_VALUE_MAX];
  property_get("ro.moz.omx.hw.max_width", propValue, "-1");
  maxWidth = -1 == atoi(propValue) ? MAX_VIDEO_WIDTH : atoi(propValue);
  property_get("ro.moz.omx.hw.max_height", propValue, "-1");
  maxHeight = -1 == atoi(propValue) ? MAX_VIDEO_HEIGHT : atoi(propValue);

  if (mVideoWidth * mVideoHeight > maxWidth * maxHeight) {
    GVDM_LOG("Video resolution exceeds hw codec capability");
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  // Validate the container-reported frame and pictureRect sizes. This ensures
  // that our video frame creation code doesn't overflow.
  nsIntSize frameSize(mVideoWidth, mVideoHeight);
  if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GVDM_LOG("It is not a valid region");
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  mReaderTaskQueue = AbstractThread::GetCurrent()->AsTaskQueue();
  MOZ_ASSERT(mReaderTaskQueue);

  if (mDecodeLooper.get() != nullptr) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  if (!InitLoopers(MediaData::VIDEO_DATA)) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  RefPtr<InitPromise> p = mInitPromise.Ensure(__func__);
  android::sp<GonkVideoDecoderManager> self = this;
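  // Once MediaCodecProxy finishes the asynchronous codec request,
  // codecReserved() configures the codec and resolves mInitPromise;
  // codecCanceled() rejects it.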
  mVideoCodecRequest.Begin(mVideoListener->Init()
    ->Then(mReaderTaskQueue, __func__,
      [self] (bool) -> void {
        self->mVideoCodecRequest.Complete();
        self->codecReserved();
      }, [self] (bool) -> void {
        self->mVideoCodecRequest.Complete();
        self->codecCanceled();
      }));
  mDecoder = MediaCodecProxy::CreateByType(mDecodeLooper, mMimeType.get(), false, mVideoListener);
  mDecoder->AsyncAskMediaCodec();

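  // When the codec can expose graphic buffers, create a GonkNativeWindow so
  // decoded frames can stay in graphic memory.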
  uint32_t capability = MediaCodecProxy::kEmptyCapability;
  if (mDecoder->getCapability(&capability) == OK && (capability &
      MediaCodecProxy::kCanExposeGraphicBuffer)) {
    mNativeWindow = new GonkNativeWindow();
  }

  return p;
}
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromGraphicBuffer(MediaBuffer* aSource,
                                                          gfx::IntRect& aPicture)
{
  sp<GraphicBuffer> srcBuffer(aSource->graphicBuffer());
  RefPtr<TextureClient> textureClient;

  if (mNeedsCopyBuffer) {
    // Copy buffer contents for bug 1199809.
    if (!mCopyAllocator) {
      mCopyAllocator = new TextureClientRecycleAllocator(ImageBridgeChild::GetSingleton());
    }
    if (!mCopyAllocator) {
      GVDM_LOG("Create buffer allocator failed!");
      return nullptr;
    }

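    // Round the copy target up to even dimensions, as YUV buffers require.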
    gfx::IntSize size(Align(aPicture.width, 2), Align(aPicture.height, 2));
    textureClient =
      mCopyAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV, size,
                                      BackendSelector::Content,
                                      TextureFlags::DEFAULT,
                                      ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
    if (!textureClient) {
      GVDM_LOG("Copy buffer allocation failed!");
      return nullptr;
    }
    // Update size to match buffer's.
    aPicture.width = size.width;
    aPicture.height = size.height;

    sp<GraphicBuffer> destBuffer =
      static_cast<GrallocTextureClientOGL*>(textureClient.get())->GetGraphicBuffer();

    CopyGraphicBuffer(srcBuffer, destBuffer);
  } else {
    textureClient = mNativeWindow->getTextureClientFromBuffer(srcBuffer.get());
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(aSource);
  }

  RefPtr<VideoData> data = VideoData::Create(mInfo.mVideo,
                                             mImageContainer,
                                             0, // Filled later by caller.
                                             0, // Filled later by caller.
                                             1, // No way to pass sample duration from muxer to
                                                // OMX codec, so we hardcode the duration here.
                                             textureClient,
                                             false, // Filled later by caller.
                                             -1,
                                             aPicture);
  return data.forget();
}
// Called on GonkVideoDecoderManager::mManagerLooper thread.
void
GonkVideoDecoderManager::onMessageReceived(const sp<AMessage> &aMessage)
{
  switch (aMessage->what()) {
    case kNotifyCodecReserved:
    {
      // Our decoder may have acquired the hardware resource that it needs
      // to start. Notify the state machine to resume loading metadata.
      GVDM_LOG("CodecReserved!");
      mReaderCallback->NotifyResourcesStatusChanged();
      break;
    }

    case kNotifyCodecCanceled:
    {
      mReaderCallback->ReleaseMediaResources();
      break;
    }

    case kNotifyPostReleaseBuffer:
    {
      ReleaseAllPendingVideoBuffers();
      break;
    }

    default:
      TRESPASS();
      break;
  }
}
void GonkVideoDecoderManager::ReleaseMediaResources() {
  GVDM_LOG("ReleseMediaResources");
  if (mDecoder == nullptr) {
    return;
  }
  ReleaseAllPendingVideoBuffers();
  mDecoder->ReleaseMediaResources();
}
void
GonkVideoDecoderManager::codecReserved()
{
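  // Init() may already have been canceled while we waited for the codec;
  // if the promise is settled there is nothing left to configure.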
  if (mInitPromise.IsEmpty()) {
    return;
  }
  GVDM_LOG("codecReserved");
  sp<AMessage> format = new AMessage;
  sp<Surface> surface;
  status_t rv = OK;
  // Fixed values
  GVDM_LOG("Configure video mime type: %s, width:%d, height:%d", mMimeType.get(), mVideoWidth, mVideoHeight);
  format->setString("mime", mMimeType.get());
  format->setInt32("width", mVideoWidth);
  format->setInt32("height", mVideoHeight);
  // Set the "moz-use-undequeued-bufs" to use the undeque buffers to accelerate
  // the video decoding.
  format->setInt32("moz-use-undequeued-bufs", 1);
  if (mNativeWindow != nullptr) {
#if ANDROID_VERSION >= 21
    surface = new Surface(mGraphicBufferProducer);
#else
    surface = new Surface(mNativeWindow->getBufferQueue());
#endif
  }
  mDecoder->configure(format, surface, nullptr, 0);
  mDecoder->Prepare();

  if (mMimeType.EqualsLiteral("video/mp4v-es")) {
    rv = mDecoder->Input(mCodecSpecificData->Elements(),
                         mCodecSpecificData->Length(), 0,
                         android::MediaCodec::BUFFER_FLAG_CODECCONFIG,
                         CODECCONFIG_TIMEOUT_US);
  }

  if (rv != OK) {
    GVDM_LOG("Failed to configure codec!!!!");
    mInitPromise.Reject(DecoderFailureReason::INIT_ERROR, __func__);
    return;
  }

  mInitPromise.Resolve(TrackType::kVideoTrack, __func__);
}
bool
GonkVideoDecoderManager::SetVideoFormat()
{
  // read video metadata from MediaCodec
  sp<AMessage> codecFormat;
  if (mDecoder->getOutputFormat(&codecFormat) == OK) {
    AString mime;
    int32_t width = 0;
    int32_t height = 0;
    int32_t stride = 0;
    int32_t slice_height = 0;
    int32_t color_format = 0;
    int32_t crop_left = 0;
    int32_t crop_top = 0;
    int32_t crop_right = 0;
    int32_t crop_bottom = 0;
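    // All of these keys must be present; stride and slice-height describe the
    // decoder's actual buffer layout, which may differ from the display
    // width and height.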
    if (!codecFormat->findString("mime", &mime) ||
        !codecFormat->findInt32("width", &width) ||
        !codecFormat->findInt32("height", &height) ||
        !codecFormat->findInt32("stride", &stride) ||
        !codecFormat->findInt32("slice-height", &slice_height) ||
        !codecFormat->findInt32("color-format", &color_format) ||
        !codecFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
      GVDM_LOG("Failed to find values");
      return false;
    }
    mFrameInfo.mWidth = width;
    mFrameInfo.mHeight = height;
    mFrameInfo.mStride = stride;
    mFrameInfo.mSliceHeight = slice_height;
    mFrameInfo.mColorFormat = color_format;

    nsIntSize displaySize(width, height);
    if (!IsValidVideoRegion(mInitialFrame, mPicture, displaySize)) {
      GVDM_LOG("It is not a valid region");
      return false;
    }
    return true;
  }
  GVDM_LOG("Fail to get output format");
  return false;
}
bool
GonkVideoDecoderManager::PerformFormatSpecificProcess(mp4_demuxer::MP4Sample* aSample)
{
  if (aSample != nullptr) {
    // We must prepare samples in AVC Annex B.
    if (!mp4_demuxer::AnnexB::ConvertSampleToAnnexB(aSample)) {
      GVDM_LOG("Failed to convert sample to annex B!");
      return false;
    }
  }

  return true;
}
nsRefPtr<MediaDataDecoder::InitPromise>
GonkVideoDecoderManager::Init(MediaDataDecoderCallback* aCallback)
{
  nsIntSize displaySize(mDisplayWidth, mDisplayHeight);
  nsIntRect pictureRect(0, 0, mVideoWidth, mVideoHeight);
  // Validate the container-reported frame and pictureRect sizes. This ensures
  // that our video frame creation code doesn't overflow.
  nsIntSize frameSize(mVideoWidth, mVideoHeight);
  if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GVDM_LOG("It is not a valid region");
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }

  mReaderCallback = aCallback;

  mReaderTaskQueue = AbstractThread::GetCurrent()->AsTaskQueue();
  MOZ_ASSERT(mReaderTaskQueue);

  if (mLooper.get() != nullptr) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }
  // Create ALooper
  mLooper = new ALooper;
  mManagerLooper = new ALooper;
  mManagerLooper->setName("GonkVideoDecoderManager");
  // Register AMessage handler to ALooper.
  mManagerLooper->registerHandler(mHandler);
  // Start ALooper thread.
  if (mLooper->start() != OK || mManagerLooper->start() != OK) {
    return InitPromise::CreateAndReject(DecoderFailureReason::INIT_ERROR, __func__);
  }
  nsRefPtr<InitPromise> p = mInitPromise.Ensure(__func__);
  mDecoder = MediaCodecProxy::CreateByType(mLooper, mMimeType.get(), false, mVideoListener);
  mDecoder->AsyncAskMediaCodec();

  uint32_t capability = MediaCodecProxy::kEmptyCapability;
  if (mDecoder->getCapability(&capability) == OK && (capability &
      MediaCodecProxy::kCanExposeGraphicBuffer)) {
    mNativeWindow = new GonkNativeWindow();
  }

  return p;
}
android::sp<MediaCodecProxy>
GonkVideoDecoderManager::Init(MediaDataDecoderCallback* aCallback)
{
  nsIntSize displaySize(mDisplayWidth, mDisplayHeight);
  nsIntRect pictureRect(0, 0, mVideoWidth, mVideoHeight);
  // Validate the container-reported frame and pictureRect sizes. This ensures
  // that our video frame creation code doesn't overflow.
  nsIntSize frameSize(mVideoWidth, mVideoHeight);
  if (!IsValidVideoRegion(frameSize, pictureRect, displaySize)) {
    GVDM_LOG("It is not a valid region");
    return nullptr;
  }

  mReaderCallback = aCallback;

  if (mLooper.get() != nullptr) {
    return nullptr;
  }
  // Create ALooper
  mLooper = new ALooper;
  mManagerLooper = new ALooper;
  mManagerLooper->setName("GonkVideoDecoderManager");
  // Register AMessage handler to ALooper.
  mManagerLooper->registerHandler(mHandler);
  // Start ALooper thread.
  if (mLooper->start() != OK || mManagerLooper->start() != OK) {
    return nullptr;
  }
  mDecoder = MediaCodecProxy::CreateByType(mLooper, mMimeType.get(), false, mVideoListener);
  mDecoder->AskMediaCodecAndWait();
  uint32_t capability = MediaCodecProxy::kEmptyCapability;
  if (mDecoder->getCapability(&capability) == OK && (capability &
      MediaCodecProxy::kCanExposeGraphicBuffer)) {
    mNativeWindow = new GonkNativeWindow();
  }

  return mDecoder;
}
// Blocks until a decoded sample is produced by the decoder.
nsresult
GonkVideoDecoderManager::Output(int64_t aStreamOffset,
                                nsRefPtr<MediaData>& aOutData)
{
  aOutData = nullptr;
  status_t err;
  if (mDecoder == nullptr) {
    GVDM_LOG("Decoder is not inited");
    return NS_ERROR_UNEXPECTED;
  }
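  // Blocking dequeue: wait up to READ_OUTPUT_BUFFER_TIMEOUT_US for the codec
  // to produce a decoded buffer.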
  err = mDecoder->Output(&mVideoBuffer, READ_OUTPUT_BUFFER_TIMEOUT_US);

  switch (err) {
    case OK:
    {
      nsRefPtr<VideoData> data;
      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
      if (rv == NS_ERROR_NOT_AVAILABLE) {
        // Decoder output an empty video buffer; try again.
        return NS_ERROR_NOT_AVAILABLE;
      } else if (rv != NS_OK || data == nullptr) {
        GVDM_LOG("Failed to create VideoData");
        return NS_ERROR_UNEXPECTED;
      }
      aOutData = data;
      return NS_OK;
    }
    case android::INFO_FORMAT_CHANGED:
    {
      // If the format changed, update our cached info.
      GVDM_LOG("Decoder format changed");
      if (!SetVideoFormat()) {
        return NS_ERROR_UNEXPECTED;
      }
      return Output(aStreamOffset, aOutData);
    }
    case android::INFO_OUTPUT_BUFFERS_CHANGED:
    {
      if (mDecoder->UpdateOutputBuffers()) {
        return Output(aStreamOffset, aOutData);
      }
      return NS_ERROR_FAILURE;
    }
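    // No decoded output is available yet; the caller is expected to try
    // again later.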
    case -EAGAIN:
    {
      return NS_ERROR_NOT_AVAILABLE;
    }
    case android::ERROR_END_OF_STREAM:
    {
      GVDM_LOG("Got the EOS frame!");
      nsRefPtr<VideoData> data;
      nsresult rv = CreateVideoData(aStreamOffset, getter_AddRefs(data));
      if (rv == NS_ERROR_NOT_AVAILABLE) {
        // For EOS, there is no need to do anything.
        return NS_ERROR_ABORT;
      }
      if (rv != NS_OK || data == nullptr) {
        GVDM_LOG("Failed to create video data");
        return NS_ERROR_UNEXPECTED;
      }
      aOutData = data;
      return NS_ERROR_ABORT;
    }
    case -ETIMEDOUT:
    {
      GVDM_LOG("Timeout. can try again next time");
      return NS_ERROR_UNEXPECTED;
    }
    default:
    {
      GVDM_LOG("Decoder failed, err=%d", err);
      return NS_ERROR_UNEXPECTED;
    }
  }

  return NS_OK;
}
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

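  // QueueFrameTimeOut() pairs this output timestamp with the queued input
  // frame times to recover the frame's duration.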
  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoting Android's AwesomePlayer.cpp).
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {

    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it
    // was reported relative to the picture size reported by the container.
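    // For example, if the container reported 320x240 but the codec outputs
    // 640x480, a crop of (0, 0, 320, 240) scales to (0, 0, 640, 480).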
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;

  if (mVideoBuffer->graphicBuffer().get()) {
    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);

    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             aStreamOffset,
                             timeUs,
                             duration,
                             textureClient,
                             keyFrame,
                             -1,
                             picture);
  } else {
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Converts to OMX_COLOR_FormatYUV420Planar
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;
      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
          mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

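    // I420 layout: a full-size Y plane followed by quarter-size U and V
    // planes.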
    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t *yuv420p_y = yuv420p_buffer;
    uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;

    // This is the approximate byte position in the stream.
    int64_t pos = aStreamOffset;

    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = yuv420p_y;
    b.mPlanes[0].mWidth = mFrameInfo.mWidth;
    b.mPlanes[0].mHeight = mFrameInfo.mHeight;
    b.mPlanes[0].mStride = stride;
    b.mPlanes[0].mOffset = 0;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = yuv420p_u;
    b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[1].mStride = (stride + 1) / 2;
    b.mPlanes[1].mOffset = 0;
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = yuv420p_v;
    b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[2].mStride = (stride + 1) / 2;
    b.mPlanes[2].mOffset = 0;
    b.mPlanes[2].mSkip = 0;

    data = VideoData::Create(
        mInfo.mVideo,
        mImageContainer,
        pos,
        timeUs,
        1, // We don't know the duration.
        b,
        keyFrame,
        -1,
        picture);
    ReleaseVideoBuffer();
  }

  data.forget(v);
  return NS_OK;
}
void
GonkVideoDecoderManager::codecCanceled()
{
  GVDM_LOG("codecCanceled");
  mInitPromise.RejectIfExists(DecoderFailureReason::CANCELED, __func__);
}
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromDataBuffer(MediaBuffer* aSource, gfx::IntRect& aPicture)
{
  if (!aSource->data()) {
    GVDM_LOG("No data in Video Buffer!");
    return nullptr;
  }
  uint8_t *yuv420p_buffer = (uint8_t *)aSource->data();
  int32_t stride = mFrameInfo.mStride;
  int32_t slice_height = mFrameInfo.mSliceHeight;

  // Converts to OMX_COLOR_FormatYUV420Planar
  if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
    ARect crop;
    crop.top = 0;
    crop.bottom = mFrameInfo.mHeight;
    crop.left = 0;
    crop.right = mFrameInfo.mWidth;
    yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
    if (mColorConverter.convertDecoderOutputToI420(aSource->data(),
        mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
      GVDM_LOG("Color conversion failed!");
      return nullptr;
    }
    stride = mFrameInfo.mWidth;
    slice_height = mFrameInfo.mHeight;
  }

  size_t yuv420p_y_size = stride * slice_height;
  size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
  uint8_t *yuv420p_y = yuv420p_buffer;
  uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
  uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
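  // For example, a 1280x720 frame with stride == width and
  // slice_height == height has a 1280 * 720 = 921600-byte Y plane and
  // 640 * 360 = 230400-byte U and V planes, so the V plane starts at byte
  // offset 921600 + 230400 = 1152000.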

  VideoData::YCbCrBuffer b;
  b.mPlanes[0].mData = yuv420p_y;
  b.mPlanes[0].mWidth = mFrameInfo.mWidth;
  b.mPlanes[0].mHeight = mFrameInfo.mHeight;
  b.mPlanes[0].mStride = stride;
  b.mPlanes[0].mOffset = 0;
  b.mPlanes[0].mSkip = 0;

  b.mPlanes[1].mData = yuv420p_u;
  b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
  b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
  b.mPlanes[1].mStride = (stride + 1) / 2;
  b.mPlanes[1].mOffset = 0;
  b.mPlanes[1].mSkip = 0;

  b.mPlanes[2].mData = yuv420p_v;
  b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
  b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
  b.mPlanes[2].mStride = (stride + 1) / 2;
  b.mPlanes[2].mOffset = 0;
  b.mPlanes[2].mSkip = 0;

  RefPtr<VideoData> data = VideoData::Create(mInfo.mVideo,
                                             mImageContainer,
                                             0, // Filled later by caller.
                                             0, // Filled later by caller.
                                             1, // We don't know the duration.
                                             b,
                                             0, // Filled later by caller.
                                             -1,
                                             aPicture);

  return data.forget();
}
nsresult
GonkVideoDecoderManager::CreateVideoData(MediaBuffer* aBuffer,
                                         int64_t aStreamOffset,
                                         VideoData **v)
{
  *v = nullptr;
  RefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (aBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

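  // RAII guard: returns aBuffer to the decoder on every early-return path
  // unless forget() is called before we return.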
  AutoReleaseMediaBuffer autoRelease(aBuffer, mDecoder.get());

  if (!aBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  if (mLastTime > timeUs) {
    GVDM_LOG("Output decoded sample time is revert. time=%lld", timeUs);
    return NS_ERROR_NOT_AVAILABLE;
  }
  mLastTime = timeUs;

  if (aBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoting Android's AwesomePlayer.cpp).
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!aBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = mPicture;
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {

    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it
    // was reported relative to the picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  if (aBuffer->graphicBuffer().get()) {
    data = CreateVideoDataFromGraphicBuffer(aBuffer, picture);
    if (data && !mNeedsCopyBuffer) {
      // RecycleCallback() will be responsible for releasing the buffer.
      autoRelease.forget();
    }
    mNeedsCopyBuffer = false;
  } else {
    data = CreateVideoDataFromDataBuffer(aBuffer, picture);
  }

  if (!data) {
    return NS_ERROR_UNEXPECTED;
  }
  // Fill necessary info.
  data->mOffset = aStreamOffset;
  data->mTime = timeUs;
  data->mKeyframe = keyFrame;

  data.forget(v);
  return NS_OK;
}