Example #1
int CDecoder::FFGetBuffer(AVCodecContext* avctx, AVFrame* pic, int flags)
{
  // Static callback invoked by FFmpeg: recover the C++ decoder instance
  // stashed in avctx->opaque and forward to the per-instance implementation.
  ICallbackHWAccel* cb = static_cast<ICallbackHWAccel*>(avctx->opaque);
  CDecoder* decoder = static_cast<CDecoder*>(cb->GetHWAccel());

  return decoder->GetBuffer(avctx, pic);
}
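FFmpeg reaches this static trampoline through the get_buffer2 hook of AVCodecContext, and the decoder is recovered from avctx->opaque, which the hardware-accel setup must have pointed at the callback interface beforehand. A minimal sketch of that registration step, assuming a hypothetical SetupBufferCallbacks helper (the member name is illustrative, not taken from the example above):

void CDecoder::SetupBufferCallbacks(AVCodecContext* avctx, ICallbackHWAccel* cb)
{
  // Stash the callback interface so FFGetBuffer can recover the decoder later.
  avctx->opaque = cb;
  // get_buffer2 is FFmpeg's custom frame-buffer allocation hook; it expects
  // exactly the (AVCodecContext*, AVFrame*, int) signature FFGetBuffer has.
  avctx->get_buffer2 = CDecoder::FFGetBuffer;
}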
Example #2
int CDecoder::FFGetBuffer(AVCodecContext *avctx, AVFrame *frame, int flags)
{
  ICallbackHWAccel* cb = static_cast<ICallbackHWAccel*>(avctx->opaque);
  CDecoder* dec = static_cast<CDecoder*>(cb->GetHWAccel());
  if (g_advancedSettings.CanLogComponent(LOGVIDEO))
    CLog::Log(LOGDEBUG, "%s::%s %dx%d format:%x:%x flags:%x", CLASSNAME, __FUNCTION__, frame->width, frame->height, frame->format, dec->m_fmt, flags);

  if ((avctx->codec && (avctx->codec->capabilities & AV_CODEC_CAP_DR1) == 0) || frame->format != dec->m_fmt)
  {
    // Direct rendering unsupported or the pixel format changed: fall back to
    // FFmpeg's default allocator rather than handing out an MMAL buffer.
    assert(0);
    return avcodec_default_get_buffer2(avctx, frame, flags);
  }

  std::shared_ptr<CMMALPool> pool = std::dynamic_pointer_cast<CMMALPool>(dec->m_pool);
  assert(pool);
  if (!pool->IsConfigured())
  {
    int aligned_width = frame->width;
    int aligned_height = frame->height;
    // Round the dimensions up to ffmpeg's alignment requirements
    AlignedSize(dec->m_avctx, aligned_width, aligned_height);
    pool->Configure(dec->m_fmt, frame->width, frame->height, aligned_width, aligned_height, 0);
  }
  CMMALYUVBuffer *YUVBuffer = dynamic_cast<CMMALYUVBuffer *>(pool->Get());
  if (!YUVBuffer || !YUVBuffer->mmal_buffer || !YUVBuffer->GetMem())
  {
    CLog::Log(LOGERROR, "%s::%s Failed to allocate buffer in time", CLASSNAME, __FUNCTION__);
    return -1;
  }

  CGPUMEM *gmem = YUVBuffer->GetMem();
  AVBufferRef *buf = av_buffer_create((uint8_t *)gmem->m_arm, gmem->m_numbytes, CDecoder::FFReleaseBuffer, gmem, AV_BUFFER_FLAG_READONLY);
  if (!buf)
  {
    CLog::Log(LOGERROR, "%s::%s av_buffer_create() failed", CLASSNAME, __FUNCTION__);
    YUVBuffer->Release();
    return -1;
  }

  uint8_t *planes[YuvImage::MAX_PLANES];
  int strides[YuvImage::MAX_PLANES];
  YUVBuffer->GetPlanes(planes);
  YUVBuffer->GetStrides(strides);

  // All planes live inside the single GPU allocation owned by 'buf', so only
  // buf[0] carries the AVBufferRef; the remaining slots are cleared.
  for (int i = 0; i < AV_NUM_DATA_POINTERS; i++)
  {
    frame->data[i] = i < YuvImage::MAX_PLANES ? planes[i] : nullptr;
    frame->linesize[i] = i < YuvImage::MAX_PLANES ? strides[i] : 0;
    frame->buf[i] = i == 0 ? buf : nullptr;
  }

  frame->extended_data = frame->data;
  // Leave extended buf alone

  if (g_advancedSettings.CanLogComponent(LOGVIDEO))
    CLog::Log(LOGDEBUG, "%s::%s buf:%p mmal:%p gmem:%p avbuf:%p:%p:%p", CLASSNAME, __FUNCTION__, YUVBuffer, YUVBuffer->mmal_buffer, gmem, frame->data[0], frame->data[1], frame->data[2]);

  return 0;
}
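The gmem pointer handed to av_buffer_create above is passed back as the opaque argument of the release callback once FFmpeg drops its last reference to the frame. A minimal sketch of that counterpart, assuming CGPUMEM carries a back-pointer to its owning CMMALYUVBuffer (the m_opaque member below is illustrative; the real field name may differ):

void CDecoder::FFReleaseBuffer(void* opaque, uint8_t* data)
{
  CGPUMEM* gmem = static_cast<CGPUMEM*>(opaque);
  // 'data' is gmem->m_arm; the GPU allocation is owned by the pool buffer,
  // so returning that buffer to the pool is the only cleanup needed here.
  CMMALYUVBuffer* buffer = static_cast<CMMALYUVBuffer*>(gmem->m_opaque);
  if (buffer)
    buffer->Release();
}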