//-------------------------------------------------------------------
// Write the sample
//
HRESULT VidWriter::writeFrame(BYTE *pData)
{
    HRESULT hr;
    IMFSample *pSample = NULL;
    const DWORD cbBuffer = 4 * m_width * m_height;

    // Unlock the buffer
    if (m_pBuffer) m_pBuffer->Unlock();

    // Set the data length of the buffer
    hr = m_pBuffer->SetCurrentLength(cbBuffer);
    if (FAILED(hr)) goto done;

    // Create a media sample and add the buffer to it
    hr = MFCreateSample(&pSample);
    if (FAILED(hr)) goto done;

    hr = pSample->AddBuffer(m_pBuffer);
    if (FAILED(hr)) goto done;

    // Set the time stamp and the duration
    hr = pSample->SetSampleTime(m_rtStart);
    if (FAILED(hr)) goto done;

    hr = pSample->SetSampleDuration(m_frametime);
    if (FAILED(hr)) goto done;

    // Increment the time stamp
    m_rtStart += m_frametime;

    // Send the sample to the Sink Writer
    hr = m_pWriter->WriteSample(m_streamIndex, pSample);

done:
    SafeRelease(&pSample);
    return hr;
}
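//-------------------------------------------------------------------
// NOTE: writeFrame() above never touches pData directly; it assumes
// m_pBuffer was created earlier, locked, and filled with the frame
// pixels before the call (hence the Unlock() at the top). A minimal
// sketch of that implied setup, using the same member names --
// prepareBuffer itself is a hypothetical helper, not original code:
//-------------------------------------------------------------------
HRESULT VidWriter::prepareBuffer(BYTE *pData)
{
    const DWORD cbBuffer = 4 * m_width * m_height; // RGB32: 4 bytes/pixel
    BYTE *pDst = NULL;

    // Create the reusable buffer on first use
    HRESULT hr = m_pBuffer ? S_OK : MFCreateMemoryBuffer(cbBuffer, &m_pBuffer);
    if (FAILED(hr)) return hr;

    // Lock it and copy the caller's frame; writeFrame() unlocks it later
    hr = m_pBuffer->Lock(&pDst, NULL, NULL);
    if (FAILED(hr)) return hr;

    memcpy(pDst, pData, cbBuffer);
    return S_OK;
}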
HRESULT
WWMFResampler::ConvertWWSampleDataToMFSample(WWMFSampleData &sampleData, IMFSample **ppSample)
{
    HRESULT hr = S_OK;
    IMFSample *pSample = NULL;
#ifdef USE_ATL
    CComPtr<IMFMediaBuffer> spBuffer;
#else  // USE_ATL
    IMFMediaBuffer *spBuffer = NULL;
#endif // USE_ATL
    BYTE *pByteBufferTo = NULL;
    //LONGLONG hnsSampleDuration;
    //LONGLONG hnsSampleTime;
    int frameCount;

    assert(ppSample);
    *ppSample = NULL;

    HRG(MFCreateMemoryBuffer(sampleData.bytes, &spBuffer));

    HRG(spBuffer->Lock(&pByteBufferTo, NULL, NULL));
    memcpy(pByteBufferTo, sampleData.data, sampleData.bytes);
    pByteBufferTo = NULL;
    HRG(spBuffer->Unlock());
    HRG(spBuffer->SetCurrentLength(sampleData.bytes));

    HRG(MFCreateSample(&pSample));
    HRG(pSample->AddBuffer(spBuffer));

    frameCount = sampleData.bytes / m_inputFormat.FrameBytes();
    /*
    hnsSampleDuration = (LONGLONG)(10.0 * 1000 * 1000 * frameCount / m_inputFormat.sampleRate);
    hnsSampleTime     = (LONGLONG)(10.0 * 1000 * 1000 * m_inputFrameTotal / m_inputFormat.sampleRate);
    HRG(pSample->SetSampleDuration(hnsSampleDuration));
    HRG(pSample->SetSampleTime(hnsSampleTime));
    */
    m_inputFrameTotal += frameCount;

    // Succeeded.
    *ppSample = pSample;
    pSample = NULL; //< prevent release

end:
    SafeRelease(&pSample);
#ifndef USE_ATL
    SafeRelease(&spBuffer);
#endif // USE_ATL
    return hr;
}
HRESULT VideoEncoder::WriteTransitionSample(UINT64 sampleDuration, TransitionBase* pTransition, DWORD streamIndex, LONGLONG* startTime)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer* pMediaBuffer = nullptr;
    BYTE* pFrameBuffer = nullptr;
    IMFSample* pSample = nullptr;
    BYTE* pOutputFrame = nullptr;

    for (DWORD i = 0; i < sampleDuration; i++)
    {
        CheckHR(MFCreateMemoryBuffer(this->m_frameBufferSize, &pMediaBuffer));
        pMediaBuffer->Lock(&pFrameBuffer, nullptr, nullptr);

        float time = (float)i / (float)sampleDuration;
        pOutputFrame = pTransition->GetOutputFrame(time);

        CheckHR(MFCopyImage(pFrameBuffer, this->m_frameStride, pOutputFrame, this->m_frameStride, this->m_frameStride, this->m_frameHeight));

        CheckHR(pMediaBuffer->Unlock());
        CheckHR(pMediaBuffer->SetCurrentLength(this->m_frameBufferSize));
        CheckHR(MFCreateSample(&pSample));
        CheckHR(pSample->AddBuffer(pMediaBuffer));
        CheckHR(pSample->SetSampleTime(*startTime));
        CheckHR(pSample->SetSampleDuration(this->GetFrameDuration()));
        CheckHR(this->m_pSinkWriter->WriteSample(streamIndex, pSample));
        (*startTime) += this->GetFrameDuration();

        // Release the per-frame resources.
        SafeRelease(&pMediaBuffer);
        SafeRelease(&pSample);
        if (pOutputFrame != nullptr)
        {
            delete pOutputFrame;
            pOutputFrame = nullptr;
        }
    }

cleanup:
    if (!SUCCEEDED(hr))
    {
        DWORD error = GetLastError();
        this->m_logFileStream << "Unexpected error: " << error << endl;
    }

    SafeRelease(&pMediaBuffer);
    SafeRelease(&pSample);
    if (pOutputFrame != nullptr)
    {
        delete pOutputFrame;
        pOutputFrame = nullptr;
    }

    return hr;
}
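//-------------------------------------------------------------------
// NOTE: the CheckHR macro used above is not shown in this excerpt.
// A plausible definition, assuming the usual "store the HRESULT and
// jump to the cleanup label on failure" pattern that the function's
// structure implies:
//-------------------------------------------------------------------
#ifndef CheckHR
#define CheckHR(x) { hr = (x); if (FAILED(hr)) { goto cleanup; } }
#endif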
IMFSample* create_sample(void *data, DWORD len, DWORD alignment, LONGLONG duration)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer *buf = NULL;
    IMFSample *sample = NULL;

    hr = MFCreateSample(&sample);
    if (FAILED(hr))
    {
        ReportError(L"Unable to allocate a sample", hr);
        return NULL;
    }

    // Yes, the argument for alignment is the actual alignment - 1
    hr = MFCreateAlignedMemoryBuffer(len, alignment - 1, &buf);
    if (FAILED(hr))
    {
        ReportError(L"Unable to allocate a memory buffer for sample", hr);
        SafeRelease(&sample); // don't leak the sample on this failure path
        return NULL;
    }

    if (data)
    {
        BYTE *buffer;

        // Lock the MediaBuffer; note this is not a thread-safe lock
        hr = buf->Lock(&buffer, NULL, NULL);
        if (FAILED(hr))
        {
            SafeRelease(&sample);
            SafeRelease(&buf);
            return NULL;
        }

        memcpy(buffer, data, len);
        buf->SetCurrentLength(len);
        buf->Unlock();
    }

    sample->AddBuffer(buf);
    hr = sample->SetSampleDuration(duration);
    SafeRelease(&buf);
    return sample;
}
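//-------------------------------------------------------------------
// NOTE: illustrative caller for create_sample() above. The decoder
// pointer, NAL data, 16-byte alignment, and the 330000 hns (33 ms)
// duration are all assumptions for the sketch, not original code.
//-------------------------------------------------------------------
void submit_nal(IMFTransform *decoder, BYTE *nalData, DWORD nalLen)
{
    // create_sample() subtracts 1 from the alignment internally
    IMFSample *sample = create_sample(nalData, nalLen, 16, 330000);
    if (!sample)
        return;

    HRESULT hr = decoder->ProcessInput(0, sample, 0);
    if (hr == MF_E_NOTACCEPTING)
    {
        // The transform wants its output drained first (not shown here)
    }
    SafeRelease(&sample);
}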
// Process the incoming NAL from the queue: wraps it up into a
// IMFMediaSample, sends it to the decoder.
//
// Thread context: decoder thread
bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket* nalPacket)
{
    bool ret = false;
    HRESULT hr;
    IMFMediaBuffer* newBuffer = NULL;
    BYTE* newBufferPtr;
    void* nalPacketPtr;
    IMFSample* newSample = NULL;
    ULONGLONG nalPresentationTime;
    const BYTE nalPrefix[] = {0, 0, 0, 1};

    // Get a pointer to the NAL data
    hr = nalPacket->GetBytes(&nalPacketPtr);
    if (FAILED(hr))
        goto bail;

    // Create the MF media buffer (+ 4 bytes for the NAL prefix
    // (0x00 0x00 0x00 0x01) which MF requires).
    hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize() + 4, &newBuffer);
    if (FAILED(hr))
        goto bail;

    // Lock the MF media buffer
    hr = newBuffer->Lock(&newBufferPtr, NULL, NULL);
    if (FAILED(hr))
        goto bail;

    // Copy the prefix and the data
    memcpy(newBufferPtr, nalPrefix, 4);
    memcpy(newBufferPtr + 4, nalPacketPtr, nalPacket->GetPayloadSize());

    // Unlock the MF media buffer
    hr = newBuffer->Unlock();
    if (FAILED(hr))
        goto bail;

    // Update the current length of the MF media buffer
    hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize() + 4);
    if (FAILED(hr))
        goto bail;

    // We now have a IMFMediaBuffer with the contents of the NAL
    // packet. We now construct a IMFSample with the buffer
    hr = MFCreateSample(&newSample);
    if (FAILED(hr))
        goto bail;

    hr = newSample->AddBuffer(newBuffer);
    if (FAILED(hr))
        goto bail;

    // Get the presentation (display) time in 100-nanosecond units
    // TODO: this is pretty meaningless without setting the start time.
    hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime);
    if (FAILED(hr))
        goto bail;

    // Set presentation time on the sample
    hr = newSample->SetSampleTime(nalPresentationTime);
    if (FAILED(hr))
        goto bail;

    // Now pass it to the decoder
    for (;;)
    {
        hr = m_h264Decoder->ProcessInput(0, newSample, 0);
        if (hr == S_OK)
            break;

        if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false)
            goto bail;
    }

    ret = true;

bail:
    if (newBuffer != NULL)
        newBuffer->Release();

    if (newSample != NULL)
        newSample->Release();

    return ret;
}
HRESULT WavStream::CreateAudioSample(IMFSample **ppSample)
{
    HRESULT hr = S_OK;

    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *pSample = NULL;

    DWORD cbBuffer = 0;
    BYTE *pData = NULL;
    LONGLONG duration = 0;

    // Start with one second of data, rounded up to the nearest block.
    cbBuffer = AlignUp<DWORD>(m_pRiff->Format()->nAvgBytesPerSec, m_pRiff->Format()->nBlockAlign);

    // Don't request any more than what's left.
    cbBuffer = min(cbBuffer, m_pRiff->BytesRemainingInChunk());

    // Create the buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Get a pointer to the buffer memory.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    // Fill the buffer
    if (SUCCEEDED(hr))
    {
        hr = m_pRiff->ReadDataFromChunk(pData, cbBuffer);
    }

    // Unlock the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Unlock();
        pData = NULL;
    }

    // Set the size of the valid data in the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a new sample and add the buffer to it.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamps, duration, and sample flags.
    if (SUCCEEDED(hr))
    {
        hr = pSample->SetSampleTime(m_rtCurrentPosition);
    }

    if (SUCCEEDED(hr))
    {
        duration = AudioDurationFromBufferSize(m_pRiff->Format(), cbBuffer);
        hr = pSample->SetSampleDuration(duration);
    }

    // Set the discontinuity flag.
    if (SUCCEEDED(hr))
    {
        if (m_discontinuity)
        {
            hr = pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
        }
    }

    if (SUCCEEDED(hr))
    {
        // Update our current position.
        m_rtCurrentPosition += duration;

        // Give the pointer to the caller.
        *ppSample = pSample;
        (*ppSample)->AddRef();
    }

    if (pData && pBuffer)
    {
        hr = pBuffer->Unlock();
    }

    SafeRelease(&pBuffer);
    SafeRelease(&pSample);
    return hr;
}
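//-------------------------------------------------------------------
// NOTE: AudioDurationFromBufferSize() is not shown in this excerpt.
// A minimal sketch, assuming the standard conversion from a byte
// count to 100-nanosecond units via nAvgBytesPerSec:
//-------------------------------------------------------------------
LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize)
{
    if (pWav->nAvgBytesPerSec == 0)
        return 0;

    // duration (hns) = bytes * 10,000,000 / bytes-per-second
    return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
}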
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime)
{
    //double TimeInSeconds = _Clock->Elapsed();
    const UINT SamplesPerSecond = 44100;
    const UINT ChannelCount = 2;
    const UINT SampleCount = FrameCount * ChannelCount;
    const UINT BitsPerSample = 16;
    const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount;
    const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns

    //
    // Write some data
    //
    IMFSample *spSample;
    IMFMediaBuffer *spBuffer;
    BYTE *pbBuffer = NULL;

    //
    // Create a media sample
    //
    HRESULT hr = MFCreateSample(&spSample);
    hr = spSample->SetSampleDuration(SampleDuration);

    //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) );
    // CaptureStartTime = 10,000,000 * t / f
    // t = CaptureStartTime * f / 10,000,000
    LONGLONG FileStartCounter = _Clock->StartTime();
    LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000);
    hr = spSample->SetSampleTime((CaptureStartCounter - FileStartCounter) * LONGLONG(10000000) / _Clock->TicksPerSecond());

    //
    // Add a media buffer and fill it with the converted audio data
    //
    hr = MFCreateMemoryBuffer(BufferLength, &spBuffer);
    hr = spBuffer->SetCurrentLength(BufferLength);
    hr = spSample->AddBuffer(spBuffer);
    hr = spBuffer->Lock(&pbBuffer, NULL, NULL);

    __int16 *OutputAudioBuffer = (__int16 *)pbBuffer;
    for (UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++)
    {
        //
        // Floats are in the range -1 to 1; clamp so that +1.0 doesn't
        // overflow a signed 16-bit sample
        //
        int Value = int(Samples[SampleIndex] * 32768.0f);
        if (Value > 32767)  Value = 32767;
        if (Value < -32768) Value = -32768;
        OutputAudioBuffer[SampleIndex] = (__int16)Value;
    }

    hr = spBuffer->Unlock();

    //
    // Write the media sample
    //
    hr = _Writer->WriteSample(1, spSample);
    PersistentAssert(SUCCEEDED(hr), "WriteSample failed");

    spSample->Release();
    spBuffer->Release();
}
virtual void doGetNextFrame()
{
    if (!_isInitialised)
    {
        _isInitialised = true;
        if (!initialise())
        {
            printf("Video device initialisation failed, stopping.");
            return;
        }
    }

    if (!isCurrentlyAwaitingData())
        return;

    DWORD processOutputStatus = 0;
    IMFSample *videoSample = NULL;
    DWORD streamIndex, flags;
    LONGLONG llVideoTimeStamp, llSampleDuration;
    HRESULT mftProcessInput = S_OK;
    HRESULT mftProcessOutput = S_OK;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *mftOutSample = NULL;
    DWORD mftOutFlags;
    bool frameSent = false;

    CHECK_HR(_videoReader->ReadSample(
        MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,                 // Flags.
        &streamIndex,      // Receives the actual stream index.
        &flags,            // Receives status flags.
        &llVideoTimeStamp, // Receives the time stamp.
        &videoSample       // Receives the sample or NULL.
    ), "Error reading video sample.");

    if (videoSample)
    {
        _frameCount++;

        CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
        CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

        // Pass the video sample to the H.264 transform.
        CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The H264 MFT ProcessInput call failed.\n");

        CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

        if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
        {
            printf("Sample ready.\n");

            CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");
            CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
            CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
            CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add buffer to the MF sample.\n");

            while (true)
            {
                _outputDataBuffer.dwStreamID = 0;
                _outputDataBuffer.dwStatus = 0;
                _outputDataBuffer.pEvents = NULL;
                _outputDataBuffer.pSample = mftOutSample;

                mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

                if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
                {
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

                    IMFMediaBuffer *buf = NULL;
                    DWORD bufLength;
                    CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
                    CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");

                    BYTE *rawBuffer = NULL;

                    auto now = GetTickCount();
                    printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n",
                           _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

                    fFrameSize = bufLength;
                    fDurationInMicroseconds = 0;
                    gettimeofday(&fPresentationTime, NULL);

                    buf->Lock(&rawBuffer, NULL, NULL);
                    memmove(fTo, rawBuffer, fFrameSize);
                    FramedSource::afterGetting(this);
                    buf->Unlock();
                    SafeRelease(&buf);

                    frameSent = true;
                    _lastSendAt = GetTickCount();
                }

                SafeRelease(&pBuffer);
                SafeRelease(&mftOutSample);

                break;
            }
        }
        else
        {
            printf("No sample.\n");
        }

        SafeRelease(&videoSample);
    }

    if (!frameSent)
    {
        envir().taskScheduler().triggerEvent(eventTriggerId, this);
    }

    return;

done:
    printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
}
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
    HRESULT hr = S_OK;
    DWORD dwStatus = 0;

    MFT_OUTPUT_STREAM_INFO outputInfo = {0};
    m_pMFT->GetOutputStreamInfo(0, &outputInfo);

    IMFMediaBuffer *pMFBuffer = nullptr;
    ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

    MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
    if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES))
    {
        pMFBuffer = GetBuffer(outputInfo.cbSize);
        if (!pMFBuffer)
        {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
            return E_FAIL;
        }

        IMFSample *pSampleOut = nullptr;
        hr = MF.CreateSample(&pSampleOut);
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr));
            ReleaseBuffer(pMFBuffer);
            return E_FAIL;
        }

        pSampleOut->AddBuffer(pMFBuffer);
        OutputBuffer.pSample = pSampleOut;
    }

    hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

    // We don't process events, just release them
    SafeRelease(&OutputBuffer.pEvents);

    // Handle stream format changes
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE)
    {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        hr = SelectOutputType();
        if (FAILED(hr))
        {
            DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
            return E_FAIL;
        }
        // Try again with the new type, it should work now!
        return ProcessOutput();
    }

    // The MFT generated no output, discard the sample and return
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE)
    {
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return S_FALSE;
    }

    // Unknown error condition
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
        SafeRelease(&OutputBuffer.pSample);
        ReleaseBuffer(pMFBuffer);
        return E_FAIL;
    }

    LAVFrame *pFrame = nullptr;
    AllocateFrame(&pFrame);

    IMFMediaType *pMTOut = nullptr;
    m_pMFT->GetOutputCurrentType(0, &pMTOut);

    MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
    pFrame->format = m_OutPixFmt;

    AVRational pixel_aspect_ratio = {1, 1};
    MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32 *)&pixel_aspect_ratio.num, (UINT32 *)&pixel_aspect_ratio.den);

    AVRational display_aspect_ratio = {0, 0};
    av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
              (int64_t)pixel_aspect_ratio.num * pFrame->width,
              (int64_t)pixel_aspect_ratio.den * pFrame->height,
              INT_MAX);
    pFrame->aspect_ratio = display_aspect_ratio;

    pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced, FALSE);
    pFrame->repeat = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

    LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
    pFrame->tff = (fo == DeintFieldOrder_Auto)
                      ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE)
                      : (fo == DeintFieldOrder_TopFieldFirst);

    if (pFrame->interlaced && !m_bInterlaced)
        m_bInterlaced = TRUE;

    pFrame->interlaced = (pFrame->interlaced
                          || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive)
                          || m_pSettings->GetDeinterlacingMode() == DeintMode_Force)
                         && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

    pFrame->ext_format.VideoPrimaries = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
    pFrame->ext_format.VideoTransferFunction = MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
    pFrame->ext_format.VideoTransferMatrix = MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
    pFrame->ext_format.VideoChromaSubsampling = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
    pFrame->ext_format.NominalRange = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

    // HACK: don't flag range=limited if it's the only value set, since it's also the implied default; this helps to avoid a reconnect.
    // The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on every vc1/wmv3 file.
    if (pFrame->ext_format.value == 0x2000)
        pFrame->ext_format.value = 0;

    // Timestamps
    if (m_bManualReorder)
    {
        if (!m_timestampQueue.empty())
        {
            pFrame->rtStart = m_timestampQueue.front();
            m_timestampQueue.pop();

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0)
            {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    }
    else
    {
        LONGLONG llTimestamp = 0;
        hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
        if (SUCCEEDED(hr))
        {
            pFrame->rtStart = llTimestamp;

            LONGLONG llDuration = 0;
            hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
            if (SUCCEEDED(hr) && llDuration > 0)
            {
                pFrame->rtStop = pFrame->rtStart + llDuration;
            }
        }
    }

    SafeRelease(&pMTOut);

    // Lock memory in the buffer
    BYTE *pBuffer = nullptr;
    pMFBuffer->Lock(&pBuffer, NULL, NULL);

    // Check alignment.
    // If not properly aligned, we need to make the data aligned.
    int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
    if ((pFrame->width % alignment) != 0)
    {
        hr = AllocLAVFrameBuffers(pFrame);
        if (FAILED(hr))
        {
            pMFBuffer->Unlock();
            ReleaseBuffer(pMFBuffer);
            SafeRelease(&OutputBuffer.pSample);
            return hr;
        }

        size_t ySize = pFrame->width * pFrame->height;

        memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
        if (m_OutPixFmt == LAVPixFmt_NV12)
        {
            memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
        }
        else if (m_OutPixFmt == LAVPixFmt_YUV420)
        {
            size_t uvSize = ySize / 4;
            memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
            memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
        }

        pMFBuffer->Unlock();
        ReleaseBuffer(pMFBuffer);
    }
    else
    {
        if (m_OutPixFmt == LAVPixFmt_NV12)
        {
            pFrame->data[0] = pBuffer;
            pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
            pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
        }
        else if (m_OutPixFmt == LAVPixFmt_YUV420)
        {
            pFrame->data[0] = pBuffer;
            pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
            pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
            pFrame->stride[0] = pFrame->width;
            pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
        }
        pFrame->data[3] = (BYTE *)pMFBuffer;
        pFrame->destruct = wmv9_buffer_destruct;
        pFrame->priv_data = this;
    }
    pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
    Deliver(pFrame);

    SafeRelease(&OutputBuffer.pSample);

    if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
        return ProcessOutput();

    return hr;
}
STDMETHODIMP CDecWMV9MFT::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pMediaSample)
{
    HRESULT hr = S_OK;
    DWORD dwStatus = 0;

    hr = m_pMFT->GetInputStatus(0, &dwStatus);
    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> GetInputStatus() failed with hr: 0x%x", hr));
        return S_FALSE;
    }

    if (!(dwStatus & MFT_INPUT_STATUS_ACCEPT_DATA))
        return S_FALSE;

    if (m_vc1Header && (m_bManualReorder || m_bNeedKeyFrame))
    {
        AVPictureType pictype = m_vc1Header->ParseVC1PictureType(buffer, buflen);
        if (m_bManualReorder)
        {
            if (pictype == AV_PICTURE_TYPE_I || pictype == AV_PICTURE_TYPE_P)
            {
                if (m_bReorderBufferValid)
                    m_timestampQueue.push(m_rtReorderBuffer);
                m_rtReorderBuffer = rtStart;
                m_bReorderBufferValid = TRUE;
            }
            else
            {
                m_timestampQueue.push(rtStart);
            }
        }

        if (m_bNeedKeyFrame)
        {
            if (pictype != AV_PICTURE_TYPE_I)
            {
                if (m_bManualReorder)
                    m_timestampQueue.pop();
                return S_OK;
            }
            else
            {
                m_bNeedKeyFrame = FALSE;
                bSyncPoint = TRUE;
            }
        }
    }

    IMFSample *pSample = nullptr;
    hr = MF.CreateSample(&pSample);
    if (FAILED(hr))
    {
        DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Sample, hr: 0x%x", hr));
        return E_FAIL;
    }

    IMFMediaBuffer *pMFBuffer = CreateMediaBuffer(buffer, buflen);
    if (!pMFBuffer)
    {
        DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
        SafeRelease(&pSample);
        return E_FAIL;
    }

    pSample->AddBuffer(pMFBuffer);

    if (rtStart != AV_NOPTS_VALUE)
    {
        pSample->SetSampleTime(rtStart);
        if (rtStop != AV_NOPTS_VALUE && rtStop > (rtStart - 1))
            pSample->SetSampleDuration(rtStop - rtStart);
    }

    pSample->SetUINT32(MFSampleExtension_CleanPoint, bSyncPoint);
    pSample->SetUINT32(MFSampleExtension_Discontinuity, bDiscontinuity);

    hr = m_pMFT->ProcessInput(0, pSample, 0);
    if (hr == MF_E_NOTACCEPTING)
    {
        // Not accepting data right now, try to process output and try again
        ProcessOutput();
        hr = m_pMFT->ProcessInput(0, pSample, 0);
    }

    SafeRelease(&pMFBuffer);
    SafeRelease(&pSample);

    if (FAILED(hr))
    {
        DbgLog((LOG_TRACE, 10, L"-> ProcessInput failed with hr: 0x%x", hr));
        return E_FAIL;
    }

    return ProcessOutput();
}
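//-------------------------------------------------------------------
// NOTE: CreateMediaBuffer() used above is a helper that is not part
// of this excerpt. A plausible shape, assuming it simply copies the
// compressed frame into a fresh IMFMediaBuffer (the real LAV code
// may pool or align buffers instead):
//-------------------------------------------------------------------
static IMFMediaBuffer *CreateMediaBuffer(const BYTE *pData, DWORD dwSize)
{
    IMFMediaBuffer *pBuffer = nullptr;
    BYTE *pOut = nullptr;

    if (FAILED(MFCreateMemoryBuffer(dwSize, &pBuffer)))
        return nullptr;

    if (FAILED(pBuffer->Lock(&pOut, nullptr, nullptr)))
    {
        pBuffer->Release();
        return nullptr;
    }

    memcpy(pOut, pData, dwSize);
    pBuffer->Unlock();
    pBuffer->SetCurrentLength(dwSize);
    return pBuffer;
}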