Example no. 1
HRESULT CopyLAVFrame(LAVFrame *pSrc, LAVFrame **ppDst)
{
  ASSERT(pSrc->format != LAVPixFmt_DXVA2);
  *ppDst = (LAVFrame *)CoTaskMemAlloc(sizeof(LAVFrame));
  if (!*ppDst) return E_OUTOFMEMORY;
  **ppDst = *pSrc;

  (*ppDst)->destruct  = nullptr;
  (*ppDst)->priv_data = nullptr;

  HRESULT hr = AllocLAVFrameBuffers(*ppDst);
  if (FAILED(hr))
    return hr;

  LAVPixFmtDesc desc = getPixelFormatDesc(pSrc->format);
  for (int plane = 0; plane < desc.planes; plane++) {
    size_t linesize = (pSrc->width / desc.planeWidth[plane]) * desc.codedbytes;
    BYTE *dst = (*ppDst)->data[plane];
    BYTE *src = pSrc->data[plane];
    if (!dst || !src)
      return E_FAIL;
    for (int i = 0; i < (pSrc->height / desc.planeHeight[plane]); i++) {
      memcpy(dst, src, linesize);
      dst += (*ppDst)->stride[plane];
      src += pSrc->stride[plane];
    }
  }

  return S_OK;
}
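
The inner loop above is a standard stride-aware plane copy: only linesize visible bytes are copied per row, while source and destination pointers advance by their own strides, which may include padding. A minimal self-contained sketch of the same pattern (the helper name and signature are illustrative, not part of LAV Filters):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Copy one image plane row by row, honoring differing strides.
// Only 'linesize' bytes per row carry pixel data; the rest of each stride is padding.
static void CopyPlane(uint8_t *dst, ptrdiff_t dstStride,
                      const uint8_t *src, ptrdiff_t srcStride,
                      size_t linesize, int rows)
{
  for (int i = 0; i < rows; i++) {
    memcpy(dst, src, linesize);
    dst += dstStride;
    src += srcStride;
  }
}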
Example no. 2
__forceinline bool CDecDXVA2::CopyFrame(LAVFrame *pFrame)
{
  HRESULT hr;
  LPDIRECT3DSURFACE9 pSurface = (LPDIRECT3DSURFACE9)pFrame->data[3];
  pFrame->format = LAVPixFmt_NV12;

  D3DSURFACE_DESC surfaceDesc;
  pSurface->GetDesc(&surfaceDesc);

  D3DLOCKED_RECT LockedRect;
  hr = pSurface->LockRect(&LockedRect, NULL, D3DLOCK_READONLY);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"pSurface->LockRect failed (hr: %X)", hr));
    return false;
  }

  // Free AVFrame based buffers again
  FreeLAVFrameBuffers(pFrame);
  // Allocate memory buffers
  AllocLAVFrameBuffers(pFrame, LockedRect.Pitch);
  // Copy surface onto memory buffers
  CopyFrameNV12((BYTE *)LockedRect.pBits, pFrame->data[0], pFrame->data[1], surfaceDesc.Height, pFrame->height, LockedRect.Pitch);

  pSurface->UnlockRect();

  return true;
}
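
CopyFrameNV12 is a LAV-internal helper; the locked surface it reads follows the standard NV12 layout: a full-height Y plane followed by a half-height plane of interleaved U/V samples, both using the surface pitch, with the UV plane starting after the allocated (not the visible) Y height. A rough sketch of such a copy under that assumption (names and signature are illustrative):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Split a locked NV12 surface (Y plane, then interleaved UV plane, both
// 'pitch' bytes per row) into two destination planes allocated with the same pitch.
// 'surfaceHeight' is the allocated surface height, 'imageHeight' the visible height.
static void CopySurfaceNV12(const uint8_t *src, uint8_t *dstY, uint8_t *dstUV,
                            int surfaceHeight, int imageHeight, ptrdiff_t pitch)
{
  // Y plane: visible rows only
  memcpy(dstY, src, (size_t)pitch * imageHeight);
  // UV plane begins after the full allocated Y plane and is half the visible height
  const uint8_t *srcUV = src + (ptrdiff_t)surfaceHeight * pitch;
  memcpy(dstUV, srcUV, (size_t)pitch * (imageHeight / 2));
}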
Example no. 3
HRESULT CopyLAVFrame(LAVFrame *pSrc, LAVFrame **ppDst)
{
  ASSERT(pSrc->format != LAVPixFmt_DXVA2);
  *ppDst = (LAVFrame *)CoTaskMemAlloc(sizeof(LAVFrame));
  if (!*ppDst) return E_OUTOFMEMORY;
  **ppDst = *pSrc;

  (*ppDst)->destruct  = nullptr;
  (*ppDst)->priv_data = nullptr;

  HRESULT hr = AllocLAVFrameBuffers(*ppDst);
  if (FAILED(hr))
    return hr;

  LAVPixFmtDesc desc = getPixelFormatDesc(pSrc->format);
  for (int plane = 0; plane < desc.planes; plane++) {
    size_t linesize = (pSrc->width / desc.planeWidth[plane]) * desc.codedbytes;
    BYTE *dst = (*ppDst)->data[plane];
    BYTE *src = pSrc->data[plane];
    if (!dst || !src)
      return E_FAIL;
    for (int i = 0; i < (pSrc->height / desc.planeHeight[plane]); i++) {
      memcpy(dst, src, linesize);
      dst += (*ppDst)->stride[plane];
      src += pSrc->stride[plane];
    }

    if (pSrc->flags & LAV_FRAME_FLAG_MVC) {
      dst = (*ppDst)->stereo[plane];
      src = pSrc->stereo[plane];
      if (!dst || !src)
        return E_FAIL;
      for (int i = 0; i < (pSrc->height / desc.planeHeight[plane]); i++) {
        memcpy(dst, src, linesize);
        dst += (*ppDst)->stride[plane];
        src += pSrc->stride[plane];
      }
    }
  }

  (*ppDst)->side_data = nullptr;
  (*ppDst)->side_data_count = 0;
  for (int i = 0; i < pSrc->side_data_count; i++)
  {
    BYTE * p = AddLAVFrameSideData(*ppDst, pSrc->side_data[i].guidType, pSrc->side_data[i].size);
    if (p)
      memcpy(p, pSrc->side_data[i].data, pSrc->side_data[i].size);
  }

  return S_OK;
}
Example no. 4
STDMETHODIMP CDecAvcodec::ConvertPixFmt(AVFrame *pFrame, LAVFrame *pOutFrame)
{
  // Allocate the buffers to write into
  AllocLAVFrameBuffers(pOutFrame);

  // Map to swscale compatible format
  AVPixelFormat dstFormat = getFFPixelFormatFromLAV(pOutFrame->format, pOutFrame->bpp);

  // Get a context
  m_pSwsContext = sws_getCachedContext(m_pSwsContext, pFrame->width, pFrame->height, (AVPixelFormat)pFrame->format, pFrame->width, pFrame->height, dstFormat, SWS_BILINEAR | SWS_PRINT_INFO, NULL, NULL, NULL);

  // Perform conversion
  sws_scale(m_pSwsContext, pFrame->data, pFrame->linesize, 0, pFrame->height, pOutFrame->data, pOutFrame->stride);

  return S_OK;
}
Example no. 5
STDMETHODIMP CDecAvcodec::ConvertPixFmt(AVFrame *pFrame, LAVFrame *pOutFrame)
{
  // Allocate the buffers to write into
  AllocLAVFrameBuffers(pOutFrame);

  // Map to swscale compatible format
  AVPixelFormat dstFormat = getFFPixelFormatFromLAV(pOutFrame->format, pOutFrame->bpp);

  // Get a context
  m_pSwsContext = sws_getCachedContext(m_pSwsContext, pFrame->width, pFrame->height, (AVPixelFormat)pFrame->format, pFrame->width, pFrame->height, dstFormat, SWS_BILINEAR | SWS_FULL_CHR_H_INT | SWS_PRINT_INFO, nullptr, nullptr, nullptr);

  ptrdiff_t linesize[4];
  for (int i = 0; i < 4; i++)
    linesize[i] = pFrame->linesize[i];

  // Perform conversion
  sws_scale2(m_pSwsContext, pFrame->data, linesize, 0, pFrame->height, pOutFrame->data, pOutFrame->stride);

  return S_OK;
}
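
sws_getCachedContext returns the passed-in context unchanged while all parameters still match and only recreates it on a size or format change, so calling it per frame is cheap. The sws_scale2 variant taking ptrdiff_t strides is not part of the stock libswscale API (it appears to come from the patched FFmpeg bundled with LAV Filters); for reference, a minimal standalone conversion with the public FFmpeg API looks like this (formats, sizes, and error handling are illustrative):

extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

// Convert one YUV420P image to NV12 using the public libswscale API.
static int ConvertToNV12(const uint8_t *const srcData[4], const int srcStride[4],
                         int width, int height)
{
  SwsContext *ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                   width, height, AV_PIX_FMT_NV12,
                                   SWS_BILINEAR, nullptr, nullptr, nullptr);
  if (!ctx)
    return -1;

  uint8_t *dstData[4] = {};
  int dstStride[4] = {};
  int ret = av_image_alloc(dstData, dstStride, width, height, AV_PIX_FMT_NV12, 32);
  if (ret >= 0) {
    sws_scale(ctx, srcData, srcStride, 0, height, dstData, dstStride);
    // ... consume dstData / dstStride ...
    av_freep(&dstData[0]);
    ret = 0;
  }
  sws_freeContext(ctx);
  return ret;
}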
Example no. 6
STDMETHODIMP CDecWMV9::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  BYTE *pBuffer = GetBuffer(m_pRawBufferSize);
  CMediaBuffer *pOutBuffer = new CMediaBuffer(pBuffer, m_pRawBufferSize, true);
  pOutBuffer->SetLength(0);

  DMO_OUTPUT_DATA_BUFFER OutputBufferStructs[1];
  memset(&OutputBufferStructs[0], 0, sizeof(DMO_OUTPUT_DATA_BUFFER));
  OutputBufferStructs[0].pBuffer  = pOutBuffer;

  hr = m_pDMO->ProcessOutput(0, 1, OutputBufferStructs, &dwStatus);
  if (FAILED(hr)) {
    ReleaseBuffer(pBuffer);
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    return S_FALSE;
  }
  if (hr == S_FALSE) {
    ReleaseBuffer(pBuffer);
    return S_FALSE;
  }

  LAVFrame *pFrame = NULL;
  AllocateFrame(&pFrame);

  BITMAPINFOHEADER *pBMI = NULL;
  videoFormatTypeHandler(mtOut, &pBMI);
  pFrame->width     = pBMI->biWidth;
  pFrame->height    = pBMI->biHeight;
  pFrame->format    = m_OutPixFmt;
  pFrame->key_frame = (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_SYNCPOINT);

  AVRational display_aspect_ratio;
  int64_t num = (int64_t)m_StreamAR.num * pBMI->biWidth;
  int64_t den = (int64_t)m_StreamAR.den * pBMI->biHeight;
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, num, den, 1 << 30);
  pFrame->aspect_ratio = display_aspect_ratio;

  BYTE contentType = 0;
  DWORD dwPropSize = 1;
  pOutBuffer->GetProperty(WM_SampleExtensionGUID_ContentType, &contentType, &dwPropSize);
  pFrame->interlaced = !!(contentType & WM_CT_INTERLACED);
  pFrame->repeat     = !!(contentType & WM_CT_REPEAT_FIRST_FIELD);

  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff           = (fo == DeintFieldOrder_Auto) ? !!(contentType & WM_CT_TOP_FIELD_FIRST) : (fo == DeintFieldOrder_TopFieldFirst);

  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  } else {
    if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIME) {
      pFrame->rtStart = OutputBufferStructs[0].rtTimestamp;
      if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIMELENGTH) {
        pFrame->rtStop = pFrame->rtStart + OutputBufferStructs[0].rtTimelength;
      }
    }
  }

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    AllocLAVFrameBuffers(pFrame);
    size_t ySize = pFrame->width * pFrame->height;
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer+ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      memcpy_plane(pFrame->data[2], pBuffer+ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer+ySize+uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    ReleaseBuffer(pBuffer);
  } else {
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&pOutBuffer);

  if (OutputBufferStructs[0].dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
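
In the aligned branch the decoder's contiguous output buffer is wrapped directly: for NV12 the interleaved UV plane follows the Y plane, and in the YUV420 case the V plane precedes the U plane (YV12 ordering), which is why data[2] gets the smaller offset. A short worked example of the offsets for an illustrative 640x480 frame:

// Illustrative offsets into a contiguous 640x480 buffer, as wrapped above.
const int w = 640, h = 480;
const size_t ySize  = (size_t)w * h;   // 307200 bytes of Y
const size_t uvSize = ySize / 4;       // 76800 bytes per chroma plane
// NV12:   data[0] = buf            data[1] = buf + 307200   (interleaved UV, 640 x 240)
// YUV420: data[0] = buf            stride[0] = 640
//         data[2] = buf + 307200   (V plane, 320 x 240)
//         data[1] = buf + 384000   (U plane follows V: YV12 ordering), stride[1] = stride[2] = 320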
Example no. 7
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  MFT_OUTPUT_STREAM_INFO outputInfo = {0};
  m_pMFT->GetOutputStreamInfo(0, &outputInfo);

  IMFMediaBuffer *pMFBuffer = nullptr;
  ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

  MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
  if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) {
    pMFBuffer = GetBuffer(outputInfo.cbSize);
    if (!pMFBuffer) { DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer")); return E_FAIL; }
  
    IMFSample *pSampleOut = nullptr;
    hr = MF.CreateSample(&pSampleOut);
    if (FAILED(hr)) { DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr)); ReleaseBuffer(pMFBuffer); return E_FAIL; }
    
    pSampleOut->AddBuffer(pMFBuffer);
    OutputBuffer.pSample = pSampleOut;
  }
  hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

  // We don't process events, just release them
  SafeRelease(&OutputBuffer.pEvents);

  // handle stream format changes
  if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE ) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    hr = SelectOutputType();
    if (FAILED(hr)) {
      DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
      return E_FAIL;
    }
    // try again with the new type, it should work now!
    return ProcessOutput();
  }
  
  // the MFT generated no output, discard the sample and return
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return S_FALSE;
  }
  
  // unknown error condition
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return E_FAIL;
  }

  LAVFrame *pFrame = nullptr;
  AllocateFrame(&pFrame);

  IMFMediaType *pMTOut = nullptr;
  m_pMFT->GetOutputCurrentType(0, &pMTOut);

  MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
  pFrame->format = m_OutPixFmt;

  AVRational pixel_aspect_ratio = {1, 1};
  MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&pixel_aspect_ratio.num, (UINT32*)&pixel_aspect_ratio.den);

  AVRational display_aspect_ratio = {0, 0};
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, (int64_t)pixel_aspect_ratio.num * pFrame->width, (int64_t)pixel_aspect_ratio.den * pFrame->height, INT_MAX);
  pFrame->aspect_ratio = display_aspect_ratio;

  pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced,       FALSE);
  pFrame->repeat     = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff = (fo == DeintFieldOrder_Auto) ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE) : (fo == DeintFieldOrder_TopFieldFirst);

  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  pFrame->ext_format.VideoPrimaries         = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES,     MFVideoPrimaries_Unknown);
  pFrame->ext_format.VideoTransferFunction  = MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION,   MFVideoTransFunc_Unknown);
  pFrame->ext_format.VideoTransferMatrix    = MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX,          MFVideoTransferMatrix_Unknown);
  pFrame->ext_format.VideoChromaSubsampling = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
  pFrame->ext_format.NominalRange           = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

  // HACK: don't flag range=limited if it's the only value set, since it's also the implied default; this helps to avoid a reconnect
  // The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on every vc1/wmv3 file
  if (pFrame->ext_format.value == 0x2000)
    pFrame->ext_format.value = 0;

  // Timestamps
  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();
      
      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  } else {
    LONGLONG llTimestamp = 0;
    hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
    if (SUCCEEDED(hr)) {
      pFrame->rtStart = llTimestamp;
      
      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  }

  SafeRelease(&pMTOut);

  // Lock memory in the buffer
  BYTE *pBuffer = nullptr;
  pMFBuffer->Lock(&pBuffer, NULL, NULL);

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    hr = AllocLAVFrameBuffers(pFrame);
    if (FAILED(hr)) {
      pMFBuffer->Unlock();
      ReleaseBuffer(pMFBuffer);
      SafeRelease(&OutputBuffer.pSample);
      return hr;
    }
    size_t ySize = pFrame->width * pFrame->height;
    
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    pMFBuffer->Unlock();
    ReleaseBuffer(pMFBuffer);
  } else {
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->data[3] = (BYTE *)pMFBuffer;
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&OutputBuffer.pSample);

  if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
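
av_reduce folds the pixel aspect ratio and the frame dimensions into a reduced display aspect ratio. A quick worked example with illustrative numbers:

// 1280x720 frame with square pixels (PAR 1:1):
//   num = 1 * 1280 = 1280, den = 1 * 720 = 720, gcd = 80
AVRational dar;
av_reduce(&dar.num, &dar.den, (int64_t)1 * 1280, (int64_t)1 * 720, INT_MAX);
// dar.num == 16, dar.den == 9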