// Initialize the WMV9/VC-1 DMO decoder for the given codec and input media type.
// Flow: reset state -> extract extradata -> normalize the VC-1 sequence header ->
// build and set the DMO input type -> negotiate an NV12/YV12 output type ->
// parse the VC-1 header for interlacing/aspect-ratio info.
// Returns S_OK on success, or the failing HRESULT from the DMO type negotiation.
STDMETHODIMP CDecWMV9::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  HRESULT hr = S_OK;
  DbgLog((LOG_TRACE, 10, L"CDecWMV9::InitDecoder(): Initializing WMV9 DMO decoder"));

  // Tear down any previous decoder instance (partial teardown — keeps the DMO object itself, presumably; confirm in DestroyDecoder).
  DestroyDecoder(false);

  // Pull the bitmap header, average frame time and aspect ratio out of the media type.
  BITMAPINFOHEADER *pBMI = NULL;
  REFERENCE_TIME rtAvg = 0;
  DWORD dwARX = 0, dwARY = 0;
  videoFormatTypeHandler(*pmt, &pBMI, &rtAvg, &dwARX, &dwARY);

  // Two-pass extradata extraction: first query the size, then copy the bytes.
  // The buffer is over-allocated by FF_INPUT_BUFFER_PADDING_SIZE, which also
  // gives the marker-shift below room to grow the data by one byte.
  size_t extralen = 0;
  BYTE *extra = NULL;
  getExtraData(*pmt, NULL, &extralen);
  if (extralen > 0) {
    extra = (BYTE *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
    getExtraData(*pmt, extra, &extralen);
  }

  // VC-1 Advanced Profile: locate the first start-code marker in the extradata.
  // The DMO expects the sequence header at offset 1; if the marker sits at
  // offset 0, shift everything right by one and prepend a zero byte.
  if (codec == AV_CODEC_ID_VC1 && extralen) {
    size_t i = 0;
    for (i = 0; i < (extralen - 4); i++) {
      uint32_t code = AV_RB32(extra+i);
      if (IS_MARKER(code))
        break;
    }
    if (i == 0) {
      // Shift stays in bounds thanks to the padding allocated above.
      memmove(extra+1, extra, extralen);
      *extra = 0;
      extralen++;
    } else if (i > 1) {
      // NOTE(review): %u with a size_t argument is a varargs mismatch on 64-bit builds.
      DbgLog((LOG_TRACE, 10, L"-> VC-1 Header at position %u (should be 0 or 1)", i));
    }
  }

  /* Create input type */
  GUID subtype = codec == AV_CODEC_ID_VC1 ? MEDIASUBTYPE_WVC1 : MEDIASUBTYPE_WMV3;
  m_nCodecId = codec;

  mtIn.SetType(&MEDIATYPE_Video);
  mtIn.SetSubtype(&subtype);
  mtIn.SetFormatType(&FORMAT_VideoInfo);
  mtIn.SetTemporalCompression(TRUE);
  mtIn.SetSampleSize(0);
  mtIn.SetVariableSize();

  // Format block = VIDEOINFOHEADER followed immediately by the codec extradata.
  VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)mtIn.AllocFormatBuffer((ULONG)(sizeof(VIDEOINFOHEADER) + extralen));
  memset(vih, 0, sizeof(VIDEOINFOHEADER));
  vih->bmiHeader.biWidth = pBMI->biWidth;
  vih->bmiHeader.biHeight = pBMI->biHeight;
  vih->bmiHeader.biPlanes = 1;
  vih->bmiHeader.biBitCount = 24;
  // 12 bpp (4:2:0) image size — matches the NV12/YV12 output negotiated below.
  vih->bmiHeader.biSizeImage = pBMI->biWidth * pBMI->biHeight * 3 / 2;
  // biSize covers header + trailing extradata, per the VFW convention.
  vih->bmiHeader.biSize = (DWORD)(sizeof(BITMAPINFOHEADER) + extralen);
  vih->bmiHeader.biCompression = subtype.Data1;
  vih->AvgTimePerFrame = rtAvg;
  SetRect(&vih->rcSource, 0, 0, pBMI->biWidth, pBMI->biHeight);
  vih->rcTarget = vih->rcSource;

  if (extralen > 0) {
    // Copy extradata behind the header, then re-point 'extra' INTO the format
    // buffer (owned by mtIn) so the VC-1 header parse below can still use it.
    memcpy((BYTE *)vih + sizeof(VIDEOINFOHEADER), extra, extralen);
    av_freep(&extra);
    extra = (BYTE *)vih + sizeof(VIDEOINFOHEADER);
  }

  hr = m_pDMO->SetInputType(0, &mtIn, 0);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Failed to set input type on DMO"));
    return hr;
  }

  /* Create output type */
  // Enumerate the DMO's output types and take the first NV12 or YV12 offering.
  int idx = 0;
  while(SUCCEEDED(hr = m_pDMO->GetOutputType(0, idx++, &mtOut))) {
    if (mtOut.subtype == MEDIASUBTYPE_NV12) {
      hr = m_pDMO->SetOutputType(0, &mtOut, 0);
      m_OutPixFmt = LAVPixFmt_NV12;
      break;
    } else if (mtOut.subtype == MEDIASUBTYPE_YV12) {
      hr = m_pDMO->SetOutputType(0, &mtOut, 0);
      m_OutPixFmt = LAVPixFmt_YUV420;
      break;
    }
  }

  // hr reflects either the failed enumeration (no usable type) or the SetOutputType result.
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Failed to set output type on DMO"));
    return hr;
  }

  // Re-read the bitmap header from the negotiated output type to size the raw buffer.
  videoFormatTypeHandler(mtOut, &pBMI);
  m_pRawBufferSize = pBMI->biSizeImage + FF_INPUT_BUFFER_PADDING_SIZE;

  m_bInterlaced = FALSE;
  memset(&m_StreamAR, 0, sizeof(m_StreamAR));
  if (extralen > 0) {
    // 'extra' points into mtIn's format buffer here (see aliasing above).
    m_vc1Header = new CVC1HeaderParser(extra, extralen, codec);
    if (m_vc1Header->hdr.valid) {
      m_bInterlaced = m_vc1Header->hdr.interlaced;
      m_StreamAR = m_vc1Header->hdr.ar;
    }
  }

  // Manual timestamp reordering is needed for VC-1 unless the splitter delivers DTS.
  m_bManualReorder = (codec == AV_CODEC_ID_VC1) && !(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_VC1_DTS);

  return S_OK;
}
// Initialize the ffmpeg (avcodec) software decoder for the given codec / media type.
// NOTE(review): this definition is TRUNCATED in the visible chunk — the body is cut
// off inside the extradata handling; the remainder of the function is not shown here.
STDMETHODIMP CDecAvcodec::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  DestroyDecoder();
  DbgLog((LOG_TRACE, 10, L"Initializing ffmpeg for codec %S", avcodec_get_name(codec)));

  // Extract the bitmap header from whichever format block the media type carries.
  BITMAPINFOHEADER *pBMI = nullptr;
  videoFormatTypeHandler((const BYTE *)pmt->Format(), pmt->FormatType(), &pBMI);

  m_pAVCodec = avcodec_find_decoder(codec);
  CheckPointer(m_pAVCodec, VFW_E_UNSUPPORTED_VIDEO);

  m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
  CheckPointer(m_pAVCtx, E_POINTER);

  DWORD dwDecFlags = m_pCallback->GetDecodeFlags();

  // Use parsing for mpeg1/2 at all times, or H264/HEVC when its not from LAV Splitter
  if( codec == AV_CODEC_ID_MPEG1VIDEO || codec == AV_CODEC_ID_MPEG2VIDEO
    || (!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER)
      && (pmt->subtype == MEDIASUBTYPE_H264 || pmt->subtype == MEDIASUBTYPE_h264
       || pmt->subtype == MEDIASUBTYPE_X264 || pmt->subtype == MEDIASUBTYPE_x264
       || pmt->subtype == MEDIASUBTYPE_H264_bis || pmt->subtype == MEDIASUBTYPE_HEVC))) {
    m_pParser = av_parser_init(codec);
  }

  // Prefer the display rectangle (rcTarget) over the coded size when present —
  // this recovers the "real" dimensions for streams with coded-size padding.
  LONG biRealWidth = pBMI->biWidth, biRealHeight = pBMI->biHeight;
  if (pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo) {
    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->Format();
    if (vih->rcTarget.right != 0 && vih->rcTarget.bottom != 0) {
      biRealWidth = vih->rcTarget.right;
      biRealHeight = vih->rcTarget.bottom;
    }
  } else if (pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video) {
    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->Format();
    if (vih2->rcTarget.right != 0 && vih2->rcTarget.bottom != 0) {
      biRealWidth = vih2->rcTarget.right;
      biRealHeight = vih2->rcTarget.bottom;
    }
  }

  // Basic codec context configuration from the container-provided header.
  m_pAVCtx->codec_id = codec;
  m_pAVCtx->codec_tag = pBMI->biCompression;
  m_pAVCtx->coded_width = pBMI->biWidth;
  m_pAVCtx->coded_height = abs(pBMI->biHeight);   // biHeight can be negative (top-down DIB)
  m_pAVCtx->bits_per_coded_sample = pBMI->biBitCount;
  m_pAVCtx->err_recognition = 0;
  m_pAVCtx->workaround_bugs = FF_BUG_AUTODETECT;
  m_pAVCtx->refcounted_frames = 1;

  // Setup threading
  // Thread Count. 0 = auto detect
  int thread_count = m_pSettings->GetNumThreads();
  if (thread_count == 0) {
    thread_count = av_cpu_count();
  }
  m_pAVCtx->thread_count = max(1, min(thread_count, AVCODEC_MAX_THREADS));
  // Force single-threaded decoding when requested, or for MPEG-4 ASP.
  if (dwDecFlags & LAV_VIDEO_DEC_FLAG_NO_MT || codec == AV_CODEC_ID_MPEG4) {
    m_pAVCtx->thread_count = 1;
  }

  m_pFrame = av_frame_alloc();
  CheckPointer(m_pFrame, E_POINTER);

  // Process Extradata
  BYTE *extra = nullptr;
  size_t extralen = 0;
  getExtraData(*pmt, nullptr, &extralen);

  BOOL bH264avc = FALSE;
  if (pmt->formattype == FORMAT_MPEG2Video
    && (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1')
     || m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1')
     || m_pAVCtx->codec_tag == MAKEFOURCC('C','C','V','1'))) {
    // Reconstruct AVC1 extradata format
    DbgLog((LOG_TRACE, 10, L"-> Processing AVC1 extradata of %d bytes", extralen));
    MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
    // 7 bytes: 5-byte avcC prefix + SPS count byte + trailing zero PPS-count block.
    extralen += 7;
    extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
    extra[0] = 1;                            // avcC configurationVersion
    extra[1] = (BYTE)mp2vi->dwProfile;
    extra[2] = 0;
    extra[3] = (BYTE)mp2vi->dwLevel;
    // NAL length size minus one; dwFlags==0 is treated as 4-byte lengths.
    extra[4] = (BYTE)(mp2vi->dwFlags ? mp2vi->dwFlags : 4) - 1;

    // only process extradata if available
    uint8_t ps_count = 0;
    if (extralen > 7) {
      // Actually copy the metadata into our new buffer
      size_t actual_len;
      getExtraData(*pmt, extra + 6, &actual_len);

      // Count the number of SPS/PPS in them and set the length
      // We'll put them all into one block and add a second block with 0 elements afterwards
      // The parsing logic does not care what type they are, it just expects 2 blocks.
      BYTE *p = extra + 6, *end = extra + 6 + actual_len;
      BOOL bSPS = FALSE, bPPS = FALSE;
      while (p + 1 < end) {
        // 16-bit big-endian NAL size prefix, plus the 2 prefix bytes themselves.
        unsigned len = (((unsigned)p[0] << 8) | p[1]) + 2;
        if (p + len > end) {
          break;
        }
        if ((p[2] & 0x1F) == 7)   // NAL type 7 = SPS
          bSPS = TRUE;
        if ((p[2] & 0x1F) == 8)   // NAL type 8 = PPS
          bPPS = TRUE;
        ps_count++;
        p += len;
      }
    }
    extra[5] = ps_count;      // all parameter sets in the first block
    extra[extralen - 1] = 0;  // second block with zero entries

    bH264avc = TRUE;
    m_pAVCtx->extradata = extra;
    m_pAVCtx->extradata_size = (int)extralen;
  } else if (extralen > 0) {
    DbgLog((LOG_TRACE, 10, L"-> Processing extradata of %d bytes", extralen));
    if (pmt->subtype == MEDIASUBTYPE_LAV_RAWVIDEO) {
      // LAV raw video carries the pixel format as the first bytes of extradata.
      if (extralen < sizeof(m_pAVCtx->pix_fmt)) {
        DbgLog((LOG_TRACE, 10, L"-> LAV RAW Video extradata is missing.."));
      } else {
        extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, nullptr);
        m_pAVCtx->pix_fmt = *(AVPixelFormat *)extra;
        // Strip the pixel-format prefix, keep the remainder as real extradata.
        extralen -= sizeof(AVPixelFormat);
        memmove(extra, extra+sizeof(AVPixelFormat), extralen);
      }
    } else if (codec == AV_CODEC_ID_VP9) {
      // read custom vpcC headers
      if (extralen >= 16) {
        extra = (uint8_t *)av_mallocz(extralen + AV_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, nullptr);
        // 'vpcC' box, version 1: carries profile, bit depth and colorimetry.
        if (AV_RB32(extra) == MKBETAG('v', 'p', 'c', 'C') && AV_RB8(extra + 4) == 1) {
          m_pAVCtx->profile = AV_RB8(extra + 8);
          m_pAVCtx->color_primaries = (AVColorPrimaries)AV_RB8(extra + 11);
          m_pAVCtx->color_trc = (AVColorTransferCharacteristic)AV_RB8(extra + 12);
          m_pAVCtx->colorspace = (AVColorSpace)AV_RB8(extra + 13);
          // Bit depth lives in the high nibble of byte 10.
          int bitdepth = AV_RB8(extra + 10) >> 4;
          if (m_pAVCtx->profile == 2 && bitdepth == 10) {
            m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P10;
          } else if (m_pAVCtx->profile == 2 && bitdepth == 12) {
            m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P12;
          }
        }
        // vpcC is side information only — do not hand it to the decoder.
        av_freep(&extra);
        extralen = 0;
      }
// Initialize the WMV9/VC-1 Media Foundation Transform decoder.
// Flow: reset state -> extract extradata -> normalize the VC-1 sequence header ->
// build and set the MFT input type -> select an output type -> read interlacing info.
// Returns S_OK on success, or the failing HRESULT from MFT type negotiation.
//
// Fixes vs. previous revision:
//  - pMTIn was leaked when SetInputType failed (early return skipped SafeRelease).
//  - pMTIn was dereferenced without checking that MF.CreateMediaType produced it
//    (and 'extra' leaked on that path).
//  - SetBlob was called with a NULL buffer when no extradata is present.
//  - GetOutputCurrentType result is now checked before use.
STDMETHODIMP CDecWMV9MFT::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  HRESULT hr = S_OK;
  DbgLog((LOG_TRACE, 10, L"CDecWMV9MFT::InitDecoder(): Initializing WMV9 MFT decoder"));

  DestroyDecoder(false);

  // Pull the bitmap header, average frame time and aspect ratio out of the media type.
  BITMAPINFOHEADER *pBMI = nullptr;
  REFERENCE_TIME rtAvg = 0;
  DWORD dwARX = 0, dwARY = 0;
  videoFormatTypeHandler(*pmt, &pBMI, &rtAvg, &dwARX, &dwARY);

  // Two-pass extradata extraction: query size first, then copy. The padding
  // also gives the one-byte marker shift below room to grow the buffer.
  size_t extralen = 0;
  BYTE *extra = nullptr;
  getExtraData(*pmt, nullptr, &extralen);
  if (extralen > 0) {
    extra = (BYTE *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
    getExtraData(*pmt, extra, &extralen);
  }

  // VC-1 Advanced Profile: the MFT expects the sequence-header start code at
  // offset 1. If the marker (0x000001xx) sits at offset 0, shift right by one.
  if (codec == AV_CODEC_ID_VC1 && extralen) {
    size_t i = 0;
    for (i = 0; i < (extralen - 4); i++) {
      uint32_t code = AV_RB32(extra + i);
      if ((code & ~0xFF) == 0x00000100)
        break;
    }
    if (i == 0) {
      memmove(extra + 1, extra, extralen);   // stays in bounds thanks to padding
      *extra = 0;
      extralen++;
    } else if (i > 1) {
      // Cast to unsigned so %u matches the vararg size on 64-bit builds.
      DbgLog((LOG_TRACE, 10, L"-> VC-1 Header at position %u (should be 0 or 1)", (unsigned)i));
    }
  }

  // Parse the VC-1 header for later use (interlacing, aspect ratio, ...).
  if (extralen > 0) {
    m_vc1Header = new CVC1HeaderParser(extra, extralen, codec);
  }

  /* Create input type */
  m_nCodecId = codec;

  IMFMediaType *pMTIn = nullptr;
  MF.CreateMediaType(&pMTIn);
  if (!pMTIn) {
    // Creation failed — don't dereference, and don't leak the extradata copy.
    av_freep(&extra);
    return E_OUTOFMEMORY;
  }

  pMTIn->SetUINT32(MF_MT_COMPRESSED, TRUE);
  pMTIn->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, FALSE);
  pMTIn->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, FALSE);

  pMTIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  pMTIn->SetGUID(MF_MT_SUBTYPE, VerifySubtype(codec, pmt->subtype));

  MFSetAttributeSize(pMTIn, MF_MT_FRAME_SIZE, pBMI->biWidth, pBMI->biHeight);

  UINT32 rateNum = 0, rateDen = 0;
  MF.AverageTimePerFrameToFrameRate(rtAvg, &rateNum, &rateDen);
  MFSetAttributeRatio(pMTIn, MF_MT_FRAME_RATE, rateNum, rateDen);

  // Only attach the extradata blob when there actually is one.
  if (extralen > 0)
    pMTIn->SetBlob(MF_MT_USER_DATA, extra, (UINT32)extralen);
  av_freep(&extra);

  hr = m_pMFT->SetInputType(0, pMTIn, 0);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Failed to set input type on MFT"));
    SafeRelease(&pMTIn);   // was leaked here previously
    return hr;
  }

  /* Create output type */
  hr = SelectOutputType();
  SafeRelease(&pMTIn);

  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> Failed to set output type on MFT"));
    return hr;
  }

  // Query the negotiated output type for interlacing information.
  IMFMediaType *pMTOut = nullptr;
  m_bInterlaced = FALSE;
  if (SUCCEEDED(m_pMFT->GetOutputCurrentType(0, &pMTOut)) && pMTOut) {
    m_bInterlaced = MFGetAttributeUINT32(pMTOut, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown) > MFVideoInterlace_Progressive;
    SafeRelease(&pMTOut);
  }

  // Manual timestamp reordering is needed for VC-1 unless the source delivers DTS only.
  m_bManualReorder = (codec == AV_CODEC_ID_VC1) && !(m_pCallback->GetDecodeFlags() & LAV_VIDEO_DEC_FLAG_ONLY_DTS);

  return S_OK;
}
// Initialize the ffmpeg (avcodec) software decoder for the given codec / media type.
// Flow: reset -> find & configure codec context -> set up threading -> rebuild
// extradata (avcC reconstruction, raw-video pix_fmt, VP6 crop) -> open the codec ->
// derive timing/reordering flags and interlacing/chroma hints from the headers.
STDMETHODIMP CDecAvcodec::InitDecoder(AVCodecID codec, const CMediaType *pmt)
{
  DestroyDecoder();
  DbgLog((LOG_TRACE, 10, L"Initializing ffmpeg for codec %S", avcodec_get_name(codec)));

  // Extract the bitmap header from whichever format block the media type carries.
  BITMAPINFOHEADER *pBMI = NULL;
  videoFormatTypeHandler((const BYTE *)pmt->Format(), pmt->FormatType(), &pBMI);

  m_pAVCodec = avcodec_find_decoder(codec);
  CheckPointer(m_pAVCodec, VFW_E_UNSUPPORTED_VIDEO);

  m_pAVCtx = avcodec_alloc_context3(m_pAVCodec);
  CheckPointer(m_pAVCtx, E_POINTER);

  // Use a bitstream parser for MPEG-1/2 always, and for raw Annex-B H.264 fourccs.
  if(codec == AV_CODEC_ID_MPEG1VIDEO || codec == AV_CODEC_ID_MPEG2VIDEO
    || pmt->subtype == FOURCCMap(MKTAG('H','2','6','4'))
    || pmt->subtype == FOURCCMap(MKTAG('h','2','6','4'))) {
    m_pParser = av_parser_init(codec);
  }

  DWORD dwDecFlags = m_pCallback->GetDecodeFlags();

  // Prefer the display rectangle (rcTarget) over the coded size when present.
  LONG biRealWidth = pBMI->biWidth, biRealHeight = pBMI->biHeight;
  if (pmt->formattype == FORMAT_VideoInfo || pmt->formattype == FORMAT_MPEGVideo) {
    VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)pmt->Format();
    if (vih->rcTarget.right != 0 && vih->rcTarget.bottom != 0) {
      biRealWidth = vih->rcTarget.right;
      biRealHeight = vih->rcTarget.bottom;
    }
  } else if (pmt->formattype == FORMAT_VideoInfo2 || pmt->formattype == FORMAT_MPEG2Video) {
    VIDEOINFOHEADER2 *vih2 = (VIDEOINFOHEADER2 *)pmt->Format();
    if (vih2->rcTarget.right != 0 && vih2->rcTarget.bottom != 0) {
      biRealWidth = vih2->rcTarget.right;
      biRealHeight = vih2->rcTarget.bottom;
    }
  }

  // Basic codec context configuration from the container-provided header.
  m_pAVCtx->codec_id = codec;
  m_pAVCtx->codec_tag = pBMI->biCompression;
  m_pAVCtx->coded_width = pBMI->biWidth;
  m_pAVCtx->coded_height = abs(pBMI->biHeight);   // biHeight can be negative (top-down DIB)
  m_pAVCtx->bits_per_coded_sample = pBMI->biBitCount;
  m_pAVCtx->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK;
  m_pAVCtx->err_recognition = AV_EF_CAREFUL;
  m_pAVCtx->workaround_bugs = FF_BUG_AUTODETECT;
  m_pAVCtx->refcounted_frames = 1;

  if (codec == AV_CODEC_ID_H264)
    m_pAVCtx->flags2 |= CODEC_FLAG2_SHOW_ALL;

  // Setup threading
  int thread_type = getThreadFlags(codec);
  if (thread_type) {
    // Thread Count. 0 = auto detect
    int thread_count = m_pSettings->GetNumThreads();
    if (thread_count == 0) {
      // 1.5x the core count — presumably to keep frame-threaded decoders busy.
      thread_count = av_cpu_count() * 3 / 2;
    }
    m_pAVCtx->thread_count = max(1, min(thread_count, AVCODEC_MAX_THREADS));
    m_pAVCtx->thread_type = thread_type;
  } else {
    m_pAVCtx->thread_count = 1;
  }
  if (dwDecFlags & LAV_VIDEO_DEC_FLAG_NO_MT) {
    m_pAVCtx->thread_count = 1;
  }

  m_pFrame = av_frame_alloc();
  CheckPointer(m_pFrame, E_POINTER);

  m_h264RandomAccess.SetAVCNALSize(0);

  // Process Extradata
  BYTE *extra = NULL;
  size_t extralen = 0;
  getExtraData(*pmt, NULL, &extralen);

  BOOL bH264avc = FALSE;
  if (extralen > 0) {
    DbgLog((LOG_TRACE, 10, L"-> Processing extradata of %d bytes", extralen));
    // Reconstruct AVC1 extradata format
    if (pmt->formattype == FORMAT_MPEG2Video
      && (m_pAVCtx->codec_tag == MAKEFOURCC('a','v','c','1')
       || m_pAVCtx->codec_tag == MAKEFOURCC('A','V','C','1')
       || m_pAVCtx->codec_tag == MAKEFOURCC('C','C','V','1'))) {
      MPEG2VIDEOINFO *mp2vi = (MPEG2VIDEOINFO *)pmt->Format();
      // 7 bytes: 5-byte avcC prefix + SPS count byte + trailing zero PPS-count block.
      extralen += 7;
      extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
      extra[0] = 1;                            // avcC configurationVersion
      extra[1] = (BYTE)mp2vi->dwProfile;
      extra[2] = 0;
      extra[3] = (BYTE)mp2vi->dwLevel;
      // NAL length size minus one; dwFlags==0 is treated as 4-byte lengths.
      extra[4] = (BYTE)(mp2vi->dwFlags ? mp2vi->dwFlags : 4) - 1;

      // Actually copy the metadata into our new buffer
      size_t actual_len;
      getExtraData(*pmt, extra+6, &actual_len);

      // Count the number of SPS/PPS in them and set the length
      // We'll put them all into one block and add a second block with 0 elements afterwards
      // The parsing logic does not care what type they are, it just expects 2 blocks.
      BYTE *p = extra+6, *end = extra+6+actual_len;
      BOOL bSPS = FALSE, bPPS = FALSE;
      int count = 0;
      while (p+1 < end) {
        // 16-bit big-endian NAL size prefix, plus the 2 prefix bytes themselves.
        unsigned len = (((unsigned)p[0] << 8) | p[1]) + 2;
        if (p + len > end) {
          break;
        }
        if ((p[2] & 0x1F) == 7)   // NAL type 7 = SPS
          bSPS = TRUE;
        if ((p[2] & 0x1F) == 8)   // NAL type 8 = PPS
          bPPS = TRUE;
        count++;
        p += len;
      }
      extra[5] = count;        // all parameter sets in the first block
      extra[extralen-1] = 0;   // second block with zero entries

      bH264avc = TRUE;
      m_h264RandomAccess.SetAVCNALSize(mp2vi->dwFlags);
    } else if (pmt->subtype == MEDIASUBTYPE_LAV_RAWVIDEO) {
      // LAV raw video carries the pixel format as the first bytes of extradata.
      if (extralen < sizeof(m_pAVCtx->pix_fmt)) {
        DbgLog((LOG_TRACE, 10, L"-> LAV RAW Video extradata is missing.."));
      } else {
        extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
        getExtraData(*pmt, extra, NULL);
        m_pAVCtx->pix_fmt = *(AVPixelFormat *)extra;
        // Strip the pixel-format prefix, keep the remainder as real extradata.
        extralen -= sizeof(AVPixelFormat);
        memmove(extra, extra+sizeof(AVPixelFormat), extralen);
      }
    } else {
      // Just copy extradata for other formats
      extra = (uint8_t *)av_mallocz(extralen + FF_INPUT_BUFFER_PADDING_SIZE);
      getExtraData(*pmt, extra, NULL);
    }
    // Hack to discard invalid MP4 metadata with AnnexB style video
    if (codec == AV_CODEC_ID_H264 && !bH264avc && extra[0] == 1) {
      av_freep(&extra);
      extralen = 0;
    }
    m_pAVCtx->extradata = extra;
    m_pAVCtx->extradata_size = (int)extralen;
  } else {
    // No extradata: VP6 variants encode horizontal/vertical crop in one
    // extradata byte (two nibbles) — synthesize it from coded vs. real size.
    if (codec == AV_CODEC_ID_VP6 || codec == AV_CODEC_ID_VP6A || codec == AV_CODEC_ID_VP6F) {
      int cropH = pBMI->biWidth - biRealWidth;
      int cropV = pBMI->biHeight - biRealHeight;
      if (cropH >= 0 && cropH <= 0x0f && cropV >= 0 && cropV <= 0x0f) {
        m_pAVCtx->extradata = (uint8_t *)av_mallocz(1 + FF_INPUT_BUFFER_PADDING_SIZE);
        m_pAVCtx->extradata_size = 1;
        m_pAVCtx->extradata[0] = (cropH << 4) | cropV;
      }
    }
  }

  m_h264RandomAccess.flush(m_pAVCtx->thread_count);
  m_CurrentThread = 0;
  m_rtStartCache = AV_NOPTS_VALUE;

  LAVPinInfo lavPinInfo = {0};
  BOOL bLAVInfoValid = SUCCEEDED(m_pCallback->GetLAVPinInfo(lavPinInfo));

  // LAV Splitter already delivers padded buffers suitable for avcodec.
  m_bInputPadded = dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER;

  // Setup codec-specific timing logic
  BOOL bVC1IsPTS = (codec == AV_CODEC_ID_VC1 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_VC1_DTS));

  // Use ffmpegs logic to reorder timestamps
  // This is required for H264 content (except AVI), and generally all codecs that use frame threading
  // VC-1 is also a special case. Its required for splitters that deliver PTS timestamps (see bVC1IsPTS above)
  m_bFFReordering = ( codec == AV_CODEC_ID_H264 && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_H264_AVI))
                   || codec == AV_CODEC_ID_VP8
                   || codec == AV_CODEC_ID_VP3
                   || codec == AV_CODEC_ID_THEORA
                   || codec == AV_CODEC_ID_HUFFYUV
                   || codec == AV_CODEC_ID_FFVHUFF
                   || codec == AV_CODEC_ID_MPEG2VIDEO
                   || codec == AV_CODEC_ID_MPEG1VIDEO
                   || codec == AV_CODEC_ID_DIRAC
                   || codec == AV_CODEC_ID_UTVIDEO
                   || codec == AV_CODEC_ID_DNXHD
                   || codec == AV_CODEC_ID_JPEG2000
                   || (codec == AV_CODEC_ID_MPEG4 && pmt->formattype == FORMAT_MPEG2Video)
                   || bVC1IsPTS;

  // Stop time is unreliable, drop it and calculate it
  m_bCalculateStopTime = (codec == AV_CODEC_ID_H264
                       || codec == AV_CODEC_ID_DIRAC
                       || (codec == AV_CODEC_ID_MPEG4 && pmt->formattype == FORMAT_MPEG2Video)
                       || bVC1IsPTS);

  // Real Video content has some odd timestamps
  // LAV Splitter does them allright with RV30/RV40, everything else screws them up
  m_bRVDropBFrameTimings = (codec == AV_CODEC_ID_RV10 || codec == AV_CODEC_ID_RV20
    || ((codec == AV_CODEC_ID_RV30 || codec == AV_CODEC_ID_RV40)
      && (!(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER)
        || (bLAVInfoValid && (lavPinInfo.flags & LAV_STREAM_FLAG_RV34_MKV)))));

  // Enable B-Frame delay handling
  m_bBFrameDelay = !m_bFFReordering && !m_bRVDropBFrameTimings;

  m_bWaitingForKeyFrame = TRUE;
  // Codecs for which decoding may resume at the next keyframe after a seek/discontinuity.
  m_bResumeAtKeyFrame = codec == AV_CODEC_ID_MPEG2VIDEO
                     || codec == AV_CODEC_ID_VC1
                     || codec == AV_CODEC_ID_RV30
                     || codec == AV_CODEC_ID_RV40
                     || codec == AV_CODEC_ID_VP3
                     || codec == AV_CODEC_ID_THEORA
                     || codec == AV_CODEC_ID_MPEG4;

  m_bNoBufferConsumption = codec == AV_CODEC_ID_MJPEGB
                        || codec == AV_CODEC_ID_LOCO
                        || codec == AV_CODEC_ID_JPEG2000;

  // Palettized content (<= 8bpp with a palette in the extradata) needs special
  // handling unless LAV Splitter delivered it (which manages the palette itself).
  m_bHasPalette = m_pAVCtx->bits_per_coded_sample <= 8
               && m_pAVCtx->extradata_size
               && !(dwDecFlags & LAV_VIDEO_DEC_FLAG_LAVSPLITTER)
               && (codec == AV_CODEC_ID_MSVIDEO1
                || codec == AV_CODEC_ID_MSRLE
                || codec == AV_CODEC_ID_CINEPAK
                || codec == AV_CODEC_ID_8BPS
                || codec == AV_CODEC_ID_QPEG
                || codec == AV_CODEC_ID_QTRLE
                || codec == AV_CODEC_ID_TSCC);

  if (FAILED(AdditionaDecoderInit())) {
    return E_FAIL;
  }

  if (bLAVInfoValid) {
    // Setting has_b_frames to a proper value will ensure smoother decoding of H264
    if (lavPinInfo.has_b_frames >= 0) {
      DbgLog((LOG_TRACE, 10, L"-> Setting has_b_frames to %d", lavPinInfo.has_b_frames));
      m_pAVCtx->has_b_frames = lavPinInfo.has_b_frames;
    }
  }

  // Open the decoder
  int ret = avcodec_open2(m_pAVCtx, m_pAVCodec, NULL);
  if (ret >= 0) {
    DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec opened successfully (ret: %d)", ret));
    m_nCodecId = codec;
  } else {
    DbgLog((LOG_TRACE, 10, L"-> ffmpeg codec failed to open (ret: %d)", ret));
    DestroyDecoder();
    return VFW_E_UNSUPPORTED_VIDEO;
  }

  // -1 = interlacing unknown but possible for this codec, 0 = not interlace-capable.
  m_iInterlaced = 0;
  for (int i = 0; i < countof(ff_interlace_capable); i++) {
    if (codec == ff_interlace_capable[i]) {
      m_iInterlaced = -1;
      break;
    }
  }

  // Detect chroma and interlaced
  if (m_pAVCtx->extradata && m_pAVCtx->extradata_size) {
    if (codec == AV_CODEC_ID_MPEG2VIDEO) {
      CMPEG2HeaderParser mpeg2Parser(extra, extralen);
      if (mpeg2Parser.hdr.valid) {
        if (mpeg2Parser.hdr.chroma < 2) {
          m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV420P;
        } else if (mpeg2Parser.hdr.chroma == 2) {
          m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P;
        }
        m_iInterlaced = mpeg2Parser.hdr.interlaced;
      }
    } else if (codec == AV_CODEC_ID_H264) {
      CH264SequenceParser h264parser;
      if (bH264avc)
        h264parser.ParseNALs(extra+6, extralen-6, 2);   // skip avcC prefix, 2-byte length fields
      else
        h264parser.ParseNALs(extra, extralen, 0);        // Annex-B, no length prefix
      if (h264parser.sps.valid)
        m_iInterlaced = h264parser.sps.interlaced;
    } else if (codec == AV_CODEC_ID_VC1) {
      CVC1HeaderParser vc1parser(extra, extralen);
      if (vc1parser.hdr.valid)
        m_iInterlaced = (vc1parser.hdr.interlaced ? -1 : 0);
    }
  }

  // Fixed pixel formats for codecs ffmpeg cannot report before the first frame.
  if (codec == AV_CODEC_ID_DNXHD)
    m_pAVCtx->pix_fmt = AV_PIX_FMT_YUV422P10;
  else if (codec == AV_CODEC_ID_FRAPS)
    m_pAVCtx->pix_fmt = AV_PIX_FMT_BGR24;

  // Trust the splitter-provided pixel format, except for FRAPS and hardware surfaces.
  if (bLAVInfoValid && codec != AV_CODEC_ID_FRAPS && m_pAVCtx->pix_fmt != AV_PIX_FMT_DXVA2_VLD)
    m_pAVCtx->pix_fmt = lavPinInfo.pix_fmt;

  DbgLog((LOG_TRACE, 10, L"AVCodec init successfull. interlaced: %d", m_iInterlaced));

  return S_OK;
}