Example #1
void
CanvasClientSurfaceStream::Update(gfx::IntSize aSize, ClientCanvasLayer* aLayer)
{
  GLScreenBuffer* screen = aLayer->mGLContext->Screen();
  SurfaceStream* stream = screen->Stream();

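  // We are cross-process when this code runs outside the default (chrome) process.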
  bool isCrossProcess = !(XRE_GetProcessType() == GeckoProcessType_Default);
  bool bufferCreated = false;
  if (isCrossProcess) {
#ifdef MOZ_WIDGET_GONK
    SharedSurface* surf = stream->SwapConsumer();
    if (!surf) {
      printf_stderr("surf is null post-SwapConsumer!\n");
      return;
    }

    if (surf->Type() != SharedSurfaceType::Gralloc) {
      printf_stderr("Unexpected non-Gralloc SharedSurface in IPC path!");
      MOZ_ASSERT(false);
      return;
    }

    SharedSurface_Gralloc* grallocSurf = SharedSurface_Gralloc::Cast(surf);

    GrallocTextureClientOGL* grallocTextureClient =
      static_cast<GrallocTextureClientOGL*>(grallocSurf->GetTextureClient());

    // A null IPDL actor means AddTextureClient() has not been called for this
    // TextureClient yet.
    if (!grallocTextureClient->GetIPDLActor()) {
      grallocTextureClient->SetTextureFlags(mTextureInfo.mTextureFlags);
      AddTextureClient(grallocTextureClient);
    }

    if (grallocTextureClient->GetIPDLActor()) {
      GetForwarder()->UseTexture(this, grallocTextureClient);
    }
#else
    printf_stderr("isCrossProcess, but not MOZ_WIDGET_GONK! Someone needs to write some code!");
    MOZ_ASSERT(false);
#endif
  } else {
    if (!mBuffer) {
      StreamTextureClientOGL* textureClient =
        new StreamTextureClientOGL(mTextureInfo.mTextureFlags);
      textureClient->InitWith(stream);
      mBuffer = textureClient;
      bufferCreated = true;
    }

    if (bufferCreated && !AddTextureClient(mBuffer)) {
      mBuffer = nullptr;
    }

    if (mBuffer) {
      GetForwarder()->UseTexture(this, mBuffer);
    }
  }

  aLayer->Painted();
}
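Example #2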
/* static */
void
GonkVideoDecoderManager::RecycleCallback(TextureClient* aClient, void* aClosure)
{
  GonkVideoDecoderManager* videoManager = static_cast<GonkVideoDecoderManager*>(aClosure);
  GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
  aClient->ClearRecycleCallback();
  videoManager->PostReleaseVideoBuffer(client->GetMediaBuffer());
}
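Example #3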
/* static */
void
GonkVideoDecoderManager::RecycleCallback(TextureClient* aClient, void* aClosure)
{
  MOZ_ASSERT(aClient && !aClient->IsDead());
  GonkVideoDecoderManager* videoManager = static_cast<GonkVideoDecoderManager*>(aClosure);
  GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
  aClient->ClearRecycleCallback();
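  // Grab the release fence so the buffer is not reused before the compositor
  // has finished reading from it.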
  FenceHandle handle = aClient->GetAndResetReleaseFenceHandle();
  videoManager->PostReleaseVideoBuffer(client->GetMediaBuffer(), handle);
}
Example #4
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromGraphicBuffer(MediaBuffer* aSource,
                                                          gfx::IntRect& aPicture)
{
  sp<GraphicBuffer> srcBuffer(aSource->graphicBuffer());
  RefPtr<TextureClient> textureClient;

  if (mNeedsCopyBuffer) {
    // Copy buffer contents for bug 1199809.
    if (!mCopyAllocator) {
      mCopyAllocator = new TextureClientRecycleAllocator(ImageBridgeChild::GetSingleton());
    }
    if (!mCopyAllocator) {
      GVDM_LOG("Create buffer allocator failed!");
      return nullptr;
    }

    gfx::IntSize size(Align(aPicture.width, 2), Align(aPicture.height, 2));
    textureClient =
      mCopyAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV, size,
                                      BackendSelector::Content,
                                      TextureFlags::DEFAULT,
                                      ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
    if (!textureClient) {
      GVDM_LOG("Copy buffer allocation failed!");
      return nullptr;
    }
    // Update size to match buffer's.
    aPicture.width = size.width;
    aPicture.height = size.height;

    sp<GraphicBuffer> destBuffer =
      static_cast<GrallocTextureClientOGL*>(textureClient.get())->GetGraphicBuffer();

    CopyGraphicBuffer(srcBuffer, destBuffer);
  } else {
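    // Zero-copy path: wrap the decoder's GraphicBuffer in a TextureClient and
    // hand the MediaBuffer back to the decoder once the compositor is done.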
    textureClient = mNativeWindow->getTextureClientFromBuffer(srcBuffer.get());
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(aSource);
  }

  RefPtr<VideoData> data = VideoData::Create(mInfo.mVideo,
                                             mImageContainer,
                                             0, // Filled later by caller.
                                             0, // Filled later by caller.
                                             1, // No way to pass sample duration from muxer to
                                                // OMX codec, so we hardcode the duration here.
                                             textureClient,
                                             false, // Filled later by caller.
                                             -1,
                                             aPicture);
  return data.forget();
}
Example #5
void OmxDecoder::ReleaseMediaResources() {
  mMediaResourcePromise.RejectIfExists(true, __func__);

  ReleaseVideoBuffer();
  ReleaseAudioBuffer();

  {
    Mutex::Autolock autoLock(mPendingVideoBuffersLock);
    MOZ_ASSERT(mPendingRecycleTexutreClients.empty());
    // Release any TextureClients that are still waiting to be recycled.
    // This should not happen. See Bug 1042308.
    if (!mPendingRecycleTexutreClients.empty()) {
      printf_stderr("OmxDecoder::ReleaseMediaResources -- TextureClients are not recycled yet\n");
      for (std::set<TextureClient*>::iterator it = mPendingRecycleTexutreClients.begin();
           it != mPendingRecycleTexutreClients.end(); ++it)
      {
        GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(*it);
        client->ClearRecycleCallback();
        if (client->GetMediaBuffer()) {
          mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetAndResetReleaseFenceHandle()));
        }
      }
      mPendingRecycleTexutreClients.clear();
    }
  }

  {
    // Free all pending video buffers.
    Mutex::Autolock autoLock(mSeekLock);
    ReleaseAllPendingVideoBuffersLocked();
  }

  if (mVideoSource.get()) {
    mVideoSource->stop();
    mVideoSource.clear();
  }

  if (mAudioSource.get()) {
    mAudioSource->stop();
    mAudioSource.clear();
  }

  mNativeWindowClient.clear();
  mNativeWindow.clear();

  // Reset this variable to make the first seek go to the previous keyframe
  // when resuming
  mLastSeekTime = -1;
}
Example #6
void OmxDecoder::ReleaseMediaResources() {
  ReleaseVideoBuffer();
  ReleaseAudioBuffer();

  {
    Mutex::Autolock autoLock(mPendingVideoBuffersLock);
    MOZ_ASSERT(mPendingRecycleTexutreClients.empty());
    // Release any TextureClients that are still waiting to be recycled.
    // This should not happen. See Bug 1042308.
    if (!mPendingRecycleTexutreClients.empty()) {
      printf_stderr("OmxDecoder::ReleaseMediaResources -- TextureClients are not recycled yet\n");
      for (std::set<TextureClient*>::iterator it = mPendingRecycleTexutreClients.begin();
           it != mPendingRecycleTexutreClients.end(); ++it)
      {
        GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(*it);
        client->ClearRecycleCallback();
        if (client->GetMediaBuffer()) {
          mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetReleaseFenceHandle()));
        }
      }
      mPendingRecycleTexutreClients.clear();
    }
  }

  {
    // Free all pending video buffers.
    Mutex::Autolock autoLock(mSeekLock);
    ReleaseAllPendingVideoBuffersLocked();
  }

  if (mVideoSource.get()) {
    mVideoSource->stop();
    mVideoSource.clear();
  }

  if (mAudioSource.get()) {
    mAudioSource->stop();
    mAudioSource.clear();
  }

  mNativeWindowClient.clear();
  mNativeWindow.clear();
}
Example #7
void OmxDecoder::RecycleCallbackImp(TextureClient* aClient)
{
  aClient->ClearRecycleCallback();
  {
    Mutex::Autolock autoLock(mPendingVideoBuffersLock);
    if (mPendingRecycleTexutreClients.find(aClient) == mPendingRecycleTexutreClients.end()) {
      printf_stderr("OmxDecoder::RecycleCallbackImp -- TextureClient is not pending recycle\n");
      return;
    }
    mPendingRecycleTexutreClients.erase(aClient);
    GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
    if (client->GetMediaBuffer()) {
      mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetReleaseFenceHandle()));
    }
  }
  sp<AMessage> notify =
    new AMessage(kNotifyPostReleaseVideoBuffer, mReflector->id());
  // post AMessage to OmxDecoder via ALooper.
  notify->post();
}
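Example #8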
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoted from Android's AwesomePlayer.cpp).
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {

    // Frame size is different from what the container reports. This is legal,
    // and we will preserve the ratio of the crop rectangle as it
    // was reported relative to the picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;

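  // Prefer the zero-copy gralloc path when the decoder produced a GraphicBuffer;
  // otherwise fall back to the software YUV path below.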
  if (mVideoBuffer->graphicBuffer().get()) {
    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);

    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             aStreamOffset,
                             timeUs,
                             duration,
                             textureClient,
                             keyFrame,
                             -1,
                             picture);
  } else {
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Converts to OMX_COLOR_FormatYUV420Planar
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;
      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
          mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t *yuv420p_y = yuv420p_buffer;
    uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;

    // This is the approximate byte position in the stream.
    int64_t pos = aStreamOffset;

    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = yuv420p_y;
    b.mPlanes[0].mWidth = mFrameInfo.mWidth;
    b.mPlanes[0].mHeight = mFrameInfo.mHeight;
    b.mPlanes[0].mStride = stride;
    b.mPlanes[0].mOffset = 0;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = yuv420p_u;
    b.mPlanes[1].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[1].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[1].mStride = (stride + 1) / 2;
    b.mPlanes[1].mOffset = 0;
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = yuv420p_v;
    b.mPlanes[2].mWidth = (mFrameInfo.mWidth + 1) / 2;
    b.mPlanes[2].mHeight = (mFrameInfo.mHeight + 1) / 2;
    b.mPlanes[2].mStride = (stride + 1) / 2;
    b.mPlanes[2].mOffset = 0;
    b.mPlanes[2].mSkip = 0;

    data = VideoData::Create(
        mInfo.mVideo,
        mImageContainer,
        pos,
        timeUs,
        1, // We don't know the duration.
        b,
        keyFrame,
        -1,
        picture);
    ReleaseVideoBuffer();
  }

  data.forget(v);
  return NS_OK;
}
Example #9
bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aTimeUs,
                           bool aKeyframeSkip, bool aDoSeek)
{
  if (!mVideoSource.get())
    return false;

  ReleaseVideoBuffer();

  status_t err;

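  // A seek invalidates all pending frames, so flush them under the seek lock
  // before reading from the new position.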
  if (aDoSeek) {
    {
      Mutex::Autolock autoLock(mSeekLock);
      ReleaseAllPendingVideoBuffersLocked();
      mIsVideoSeeking = true;
    }
    MediaSource::ReadOptions options;
    options.setSeekTo(aTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
    err = mVideoSource->read(&mVideoBuffer, &options);
    {
      Mutex::Autolock autoLock(mSeekLock);
      mIsVideoSeeking = false;
      PostReleaseVideoBuffer(nullptr, FenceHandle());
    }

    aDoSeek = false;
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  aFrame->mSize = 0;

  if (err == OK) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;

    size_t length = mVideoBuffer->range_length();

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      NS_WARNING("OMX decoder did not return frame time");
      return false;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }

    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    RefPtr<mozilla::layers::TextureClient> textureClient;
    if (mVideoBuffer->graphicBuffer().get()) {
      textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
    }

    if (textureClient) {
      // Manually increment the reference count to keep the MediaBuffer alive
      // while the TextureClient is in use.
      mVideoBuffer->add_ref();
      GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
      grallocClient->SetMediaBuffer(mVideoBuffer);
      // Set recycle callback for TextureClient
      textureClient->SetRecycleCallback(OmxDecoder::RecycleCallback, this);
      {
        Mutex::Autolock autoLock(mPendingVideoBuffersLock);
        // Store pending recycle TextureClient.
        MOZ_ASSERT(mPendingRecycleTexutreClients.find(textureClient) == mPendingRecycleTexutreClients.end());
        mPendingRecycleTexutreClients.insert(textureClient);
      }

      aFrame->mGraphicBuffer = textureClient;
      aFrame->mRotation = mVideoRotation;
      aFrame->mTimeUs = timeUs;
      aFrame->mKeyFrame = keyFrame;
      aFrame->Y.mWidth = mVideoWidth;
      aFrame->Y.mHeight = mVideoHeight;
      // Release our reference so that OmxDecoder no longer holds the video
      // buffer; the MediaBuffer's ref count drops from 2 to 1.
      ReleaseVideoBuffer();
    } else if (length > 0) {
      char *data = static_cast<char *>(mVideoBuffer->data()) + mVideoBuffer->range_offset();

      if (unreadable) {
        LOG(PR_LOG_DEBUG, "video frame is unreadable");
      }

      if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
        return false;
      }
    }
    // Check if this frame is valid or not. If not, skip it.
    if ((aKeyframeSkip && timeUs < aTimeUs) || length == 0) {
      aFrame->mShouldSkip = true;
    }
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    if (!SetVideoFormat()) {
      return false;
    } else {
      return ReadVideo(aFrame, aTimeUs, aKeyframeSkip, aDoSeek);
    }
  }
  else if (err == ERROR_END_OF_STREAM) {
    return false;
  }
  else if (err == -ETIMEDOUT) {
    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo timed out, will retry");
    return true;
  }
  else {
    // UNKNOWN_ERROR is sometimes used to mean "out of memory", but
    // regardless, don't keep trying to decode if the decoder doesn't want to.
    LOG(PR_LOG_DEBUG, "OmxDecoder::ReadVideo failed, err=%d", err);
    return false;
  }

  return true;
}
Example #10
SharedSurface_Gralloc*
SharedSurface_Gralloc::Create(GLContext* prodGL,
                              const GLFormats& formats,
                              const gfx::IntSize& size,
                              bool hasAlpha,
                              ISurfaceAllocator* allocator)
{
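    // Cache the "gfx.gralloc.fence-with-readpixels" pref on first use.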
    static bool runOnce = true;
    if (runOnce) {
        sForceReadPixelsToFence = false;
        mozilla::Preferences::AddBoolVarCache(&sForceReadPixelsToFence,
                                              "gfx.gralloc.fence-with-readpixels");
        runOnce = false;
    }

    GLLibraryEGL* egl = &sEGLLibrary;
    MOZ_ASSERT(egl);

    DEBUG_PRINT("SharedSurface_Gralloc::Create -------\n");

    if (!HasExtensions(egl, prodGL))
        return nullptr;

    gfxContentType type = hasAlpha ? GFX_CONTENT_COLOR_ALPHA
                                   : GFX_CONTENT_COLOR;

    gfxImageFormat format
      = gfxPlatform::GetPlatform()->OptimalFormatForContent(type);

    GrallocTextureClientOGL* grallocTC =
      new GrallocTextureClientOGL(
          allocator,
          gfx::ImageFormatToSurfaceFormat(format),
          TEXTURE_FLAGS_DEFAULT);

    if (!grallocTC->AllocateForGLRendering(size)) {
      return nullptr;
    }

    sp<GraphicBuffer> buffer = grallocTC->GetGraphicBuffer();

    EGLDisplay display = egl->Display();
    EGLClientBuffer clientBuffer = buffer->getNativeBuffer();
    EGLint attrs[] = {
        LOCAL_EGL_NONE, LOCAL_EGL_NONE
    };
    EGLImage image = egl->fCreateImage(display,
                                       EGL_NO_CONTEXT,
                                       LOCAL_EGL_NATIVE_BUFFER_ANDROID,
                                       clientBuffer, attrs);
    if (!image) {
        grallocTC->DropTextureData()->DeallocateSharedData(allocator);
        return nullptr;
    }

    prodGL->MakeCurrent();
    GLuint prodTex = 0;
    prodGL->fGenTextures(1, &prodTex);
    ScopedBindTexture autoTex(prodGL, prodTex);

    prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_MIN_FILTER, LOCAL_GL_LINEAR);
    prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_MAG_FILTER, LOCAL_GL_LINEAR);
    prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_WRAP_S, LOCAL_GL_CLAMP_TO_EDGE);
    prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_WRAP_T, LOCAL_GL_CLAMP_TO_EDGE);

    prodGL->fEGLImageTargetTexture2D(LOCAL_GL_TEXTURE_2D, image);

    egl->fDestroyImage(display, image);

    SharedSurface_Gralloc *surf = new SharedSurface_Gralloc(prodGL, size, hasAlpha, egl, allocator, grallocTC, prodTex);

    DEBUG_PRINT("SharedSurface_Gralloc::Create: success -- surface %p, GraphicBuffer %p.\n", surf, buffer.get());

    return surf;
}