// Pulls one converted chunk of audio out of the resampler MFT.
//
// sampleData_return: receives the converted data; must be non-NULL, its data
//                    pointer must be NULL on entry, and its bytes field holds
//                    the buffer capacity to allocate.
// Returns the transform's HRESULT unchanged, so callers can observe
// MF_E_TRANSFORM_NEED_MORE_INPUT and feed more input.
HRESULT
WWMFResampler::GetSampleDataFromMFTransform(WWMFSampleData *sampleData_return)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer *pMemoryBuffer = NULL;
    MFT_OUTPUT_STREAM_INFO streamInfo;
    MFT_OUTPUT_DATA_BUFFER outBuffer;
    DWORD processStatus = 0;

    memset(&streamInfo, 0, sizeof streamInfo);
    memset(&outBuffer, 0, sizeof outBuffer);

    assert(sampleData_return);
    assert(NULL == sampleData_return->data);

    // One sample carrying one memory buffer sized to the caller's request.
    HRG(MFCreateSample(&(outBuffer.pSample)));
    HRG(MFCreateMemoryBuffer(sampleData_return->bytes, &pMemoryBuffer));
    HRG(outBuffer.pSample->AddBuffer(pMemoryBuffer));

    outBuffer.dwStreamID = 0;
    outBuffer.dwStatus   = 0;
    outBuffer.pEvents    = NULL;

    hr = m_pTransform->ProcessOutput(0, 1, &outBuffer, &processStatus);
    if (FAILED(hr)) {
        // Includes MF_E_TRANSFORM_NEED_MORE_INPUT; the caller decides.
        goto end;
    }

    // Copy the transform's output into the caller-visible structure.
    HRG(ConvertMFSampleToWWSampleData(outBuffer.pSample, sampleData_return));

end:
    SafeRelease(&pMemoryBuffer);
    SafeRelease(&outBuffer.pSample);
    return hr;
}
// Wraps the PCM bytes held in sampleData into a newly created IMFSample.
//
// sampleData: source PCM; sampleData.bytes bytes are copied into the sample.
// ppSample:   receives the new sample on success (caller releases);
//             set to NULL on entry.
// Side effect: advances m_inputFrameTotal by the number of input frames.
HRESULT
WWMFResampler::ConvertWWSampleDataToMFSample(WWMFSampleData &sampleData, IMFSample **ppSample)
{
    HRESULT hr = S_OK;
    IMFSample *pResult = NULL;
#ifdef USE_ATL
    CComPtr<IMFMediaBuffer> spBuffer;
#else  // USE_ATL
    IMFMediaBuffer *spBuffer = NULL;
#endif // USE_ATL
    BYTE *pDst = NULL;
    int frameCount = 0;

    assert(ppSample);
    *ppSample = NULL;

    // Copy the caller's bytes into a fresh MF memory buffer.
    HRG(MFCreateMemoryBuffer(sampleData.bytes, &spBuffer));
    HRG(spBuffer->Lock(&pDst, NULL, NULL));
    memcpy(pDst, sampleData.data, sampleData.bytes);
    pDst = NULL;
    HRG(spBuffer->Unlock());
    HRG(spBuffer->SetCurrentLength(sampleData.bytes));

    // Attach the buffer to a new sample.
    HRG(MFCreateSample(&pResult));
    HRG(pResult->AddBuffer(spBuffer));

    frameCount = sampleData.bytes / m_inputFormat.FrameBytes();
    // NOTE: sample time/duration stamping is intentionally disabled here
    // (the resampler path does not require it); m_inputFrameTotal still
    // tracks the running input frame position.
    m_inputFrameTotal += frameCount;

    // Success: transfer ownership to the caller.
    *ppSample = pResult;
    pResult = NULL; //< prevent the release below

end:
    SafeRelease(&pResult);
#ifndef USE_ATL
    SafeRelease(&spBuffer);
#endif // USE_ATL
    return hr;
}
// Renders a transition effect into the sink writer, one video frame per
// loop iteration.
//
// sampleDuration: number of frames the transition spans.
// pTransition:    supplies the blended frame for normalized time t in [0,1).
// streamIndex:    sink-writer stream the frames are written to.
// startTime:      in/out running presentation time (100-ns units); advanced
//                 by one frame duration per frame written.
// Returns the first failing HRESULT (via CheckHR's jump to cleanup).
HRESULT VideoEncoder::WriteTransitionSample(UINT64 sampleDuration, TransitionBase* pTransition, DWORD streamIndex, LONGLONG* startTime)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer* pMediaBuffer = nullptr;
    BYTE* pFrameBuffer = nullptr;
    IMFSample* pSample = nullptr;
    BYTE* pOutputFrame = nullptr;

    for (DWORD i = 0; i < sampleDuration; i++)
    {
        CheckHR(MFCreateMemoryBuffer(this->m_frameBufferSize, &pMediaBuffer));
        // BUGFIX: the Lock result was previously ignored; a failed Lock would
        // have left pFrameBuffer invalid and we would have written through it.
        CheckHR(pMediaBuffer->Lock(&pFrameBuffer, nullptr, nullptr));

        // Normalized transition progress for this frame.
        float time = (float)i / (float)sampleDuration;
        pOutputFrame = pTransition->GetOutputFrame(time);

        CheckHR(MFCopyImage(pFrameBuffer, this->m_frameStride, pOutputFrame, this->m_frameStride, this->m_frameStride, this->m_frameHeight));
        CheckHR(pMediaBuffer->Unlock());
        CheckHR(pMediaBuffer->SetCurrentLength(this->m_frameBufferSize));

        CheckHR(MFCreateSample(&pSample));
        CheckHR(pSample->AddBuffer(pMediaBuffer));
        CheckHR(pSample->SetSampleTime(*startTime));
        CheckHR(pSample->SetSampleDuration(this->GetFrameDuration()));
        CheckHR(this->m_pSinkWriter->WriteSample(streamIndex, pSample));

        (*startTime) += this->GetFrameDuration();

        // 释放示例资源.
        SafeRelease(&pMediaBuffer);
        SafeRelease(&pSample);
        if (pOutputFrame != nullptr)
        {
            // NOTE(review): if GetOutputFrame allocates with new[], this must
            // be delete[] — confirm against the allocation site.
            delete pOutputFrame;
            pOutputFrame = nullptr;
        }
    }

cleanup:
    if (!SUCCEEDED(hr))
    {
        // NOTE(review): GetLastError() is not meaningful for COM HRESULTs;
        // hr itself carries the failure code.
        DWORD error = GetLastError();
        this->m_logFileStream << "意外错误: " << error << endl;
    }

    SafeRelease(&pMediaBuffer);
    SafeRelease(&pSample);
    if (pOutputFrame != nullptr)
    {
        delete pOutputFrame;
        pOutputFrame = nullptr;
    }

    return hr;
}
//-------------------------------------------------------------------
// Reads cbToRead bytes from pStream, starting at cbOffset, into a
// newly allocated media buffer returned through ppBuffer. On failure
// the error is logged and no buffer is returned.
//-------------------------------------------------------------------
HRESULT CASFManager::ReadDataIntoBuffer(
    IMFByteStream *pStream,    // Pointer to the byte stream.
    DWORD cbOffset,            // Offset at which to start reading
    DWORD cbToRead,            // Number of bytes to read
    IMFMediaBuffer **ppBuffer  // Receives a pointer to the buffer.
    )
{
    HRESULT hr = S_OK;
    BYTE *pLockedBytes = NULL;
    DWORD cbActuallyRead = 0;      // Bytes the stream actually delivered.
    IMFMediaBuffer *pMediaBuffer = NULL;

    // Allocate the destination buffer.
    CHECK_HR(hr = MFCreateMemoryBuffer(cbToRead, &pMediaBuffer));

    // Map the buffer memory so the stream can write into it.
    CHECK_HR(hr = pMediaBuffer->Lock(&pLockedBytes, NULL, NULL));

    // Seek, then read. The stream may return fewer bytes than requested.
    CHECK_HR(hr = pStream->SetCurrentPosition(cbOffset));
    CHECK_HR(hr = pStream->Read(pLockedBytes, cbToRead, &cbActuallyRead));

    CHECK_HR(hr = pMediaBuffer->Unlock());
    pLockedBytes = NULL;

    // Record how much of the buffer holds valid data.
    CHECK_HR(hr = pMediaBuffer->SetCurrentLength(cbActuallyRead));

    // Hand the buffer to the caller with its own reference.
    *ppBuffer = pMediaBuffer;
    (*ppBuffer)->AddRef();

    TRACE((L"Read data from the ASF file into a media buffer.\n"));

done:
    LOG_MSG_IF_FAILED(L"CASFManager::ReadDataIntoBuffer failed.\n", hr);

    // Only non-NULL here if we bailed out between Lock and Unlock.
    if (pLockedBytes)
    {
        pMediaBuffer->Unlock();
    }
    SAFE_RELEASE(pMediaBuffer);
    return hr;
}
//------------------------------------------------------------------- // Get a buffer to write data into // HRESULT VidWriter::getWriteBuffer(BYTE **ppData) { HRESULT hr; const DWORD cbBuffer = 4 * m_width * m_height; // Create a new memory buffer SafeRelease(&m_pBuffer); hr = MFCreateMemoryBuffer(cbBuffer, &m_pBuffer); if (FAILED(hr)) goto done; // Lock the buffer hr = m_pBuffer->Lock(ppData, NULL, NULL); done: return hr; }
// Captures the current contents of the source device context into a new
// 32-bit RGB video sample returned through ppSample.
HRESULT GDISampleProvider::GetSample(IMFSample** ppSample) const
{
    // Copy the desktop DC into our capture DC before reading pixels.
    BOOL b = BitBlt(hDest, 0, 0, width, height, hdc, 0, 0, SRCCOPY);
    assert(b);

    BYTE * pixels;
    IMFMediaBufferPtr pBuffer;
    // `length` is a member; assumed to be the full frame size in bytes —
    // see the SetCurrentLength note below.
    HRESULT hr = MFCreateMemoryBuffer(length, &pBuffer);

    // Describe the capture bitmap as a bottom-up 32bpp BI_RGB DIB.
    BITMAPINFOHEADER bi;
    bi.biSize = sizeof(BITMAPINFOHEADER);
    bi.biWidth = width;
    bi.biHeight = height;
    bi.biPlanes = 1;
    bi.biBitCount = 32;
    bi.biCompression = BI_RGB;
    bi.biSizeImage = 0;
    bi.biXPelsPerMeter = 0;
    bi.biYPelsPerMeter = 0;
    bi.biClrUsed = 0;
    bi.biClrImportant = 0;

    if (SUCCEEDED(hr))
        hr = pBuffer->Lock(&pixels, nullptr, nullptr);
    // NOTE(review): GetDIBits' return value (scan lines copied) is ignored;
    // a failure here silently produces an uninitialized frame.
    if (SUCCEEDED(hr))
        GetDIBits(hdc, hbDesktop, 0, height, pixels, reinterpret_cast<BITMAPINFO*>(&bi), DIB_RGB_COLORS);
    if (SUCCEEDED(hr))
        hr = pBuffer->Unlock();
    // NOTE(review): the buffer was created with `length` but the valid-data
    // length is set to cbWidth * VIDEO_HEIGHT — confirm these two expressions
    // are always equal (mixing the member height and a macro looks accidental).
    if (SUCCEEDED(hr))
        hr = pBuffer->SetCurrentLength(cbWidth * VIDEO_HEIGHT);
    // Passing nullptr creates an (initially bufferless) video sample; the
    // captured buffer is attached below.
    if (SUCCEEDED(hr))
        hr = MFCreateVideoSampleFromSurface(nullptr, ppSample);
    if (SUCCEEDED(hr))
        hr = (*ppSample)->AddBuffer(pBuffer);
    return hr;
}
// Process the incomming NAL from the queue: wraps it up into a // IMFMediaSample, sends it to the decoder. // // Thread context: decoder thread bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket* nalPacket) { bool ret = false; HRESULT hr; IMFMediaBuffer* newBuffer = NULL; BYTE* newBufferPtr; void* nalPacketPtr; // IMFSample* newSample = NULL; ULONGLONG nalPresentationTime; const BYTE nalPrefix[] = {0, 0, 0, 1}; // Get a pointer to the NAL data hr = nalPacket->GetBytes(&nalPacketPtr); if (FAILED(hr)) goto bail; // Create the MF media buffer (+ 4 bytes for the NAL Prefix (0x00 0x00 0x00 0x01) // which MF requires. hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize()+4, &newBuffer); if (FAILED(hr)) goto bail; // Lock the MF media buffer hr = newBuffer->Lock(&newBufferPtr, NULL, NULL); if (FAILED(hr)) goto bail; // Copy the prefix and the data memcpy(newBufferPtr, nalPrefix, 4); memcpy(newBufferPtr+4, nalPacketPtr, nalPacket->GetPayloadSize()); // Unlock the MF media buffer hr = newBuffer->Unlock(); if (FAILED(hr)) goto bail; // Update the current length of the MF media buffer hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize()+4); if (FAILED(hr)) goto bail; // We now have a IMFMediaBuffer with the contents of the NAL // packet. We now construct a IMFSample with the buffer hr = MFCreateSample(&newSample); if (FAILED(hr)) goto bail; hr = newSample->AddBuffer(newBuffer); if (FAILED(hr)) goto bail; // Get the presentation (display) time in 100-nanosecond units // TODO: this is pretty meaningless without setting the start time. 
hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime); if (FAILED(hr)) goto bail; // Set presentation time on the sample hr = newSample->SetSampleTime(nalPresentationTime); if (FAILED(hr)) goto bail; // Now parse it to the decoder for (;;) { hr = m_h264Decoder->ProcessInput(0, newSample, 0); if (hr == S_OK) break; if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false) goto bail; } ret = true; bail: if (newBuffer != NULL) newBuffer->Release(); if (newSample != NULL) newSample->Release(); return ret; }
// Creates one audio sample containing up to one second of data from the
// WAV chunk, stamped with the current position and duration.
//
// ppSample: receives the sample (AddRef'd for the caller) on success.
// Advances m_rtCurrentPosition by the sample duration; sets the
// discontinuity attribute when m_discontinuity is pending.
HRESULT WavStream::CreateAudioSample(IMFSample **ppSample)
{
    HRESULT hr = S_OK;
    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *pSample = NULL;
    DWORD cbBuffer = 0;
    BYTE *pData = NULL;
    LONGLONG duration = 0;

    // Start with one second of data, rounded up to the nearest block.
    cbBuffer = AlignUp<DWORD>(m_pRiff->Format()->nAvgBytesPerSec, m_pRiff->Format()->nBlockAlign);

    // Don't request any more than what's left.
    cbBuffer = min(cbBuffer, m_pRiff->BytesRemainingInChunk());

    // Create the buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Get a pointer to the buffer memory.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    // Fill the buffer
    if (SUCCEEDED(hr))
    {
        hr = m_pRiff->ReadDataFromChunk(pData, cbBuffer);
    }

    // Unlock the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Unlock();
        pData = NULL;
    }

    // Set the size of the valid data in the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a new sample and add the buffer to it.
    if (SUCCEEDED(hr))
    {
        hr = MFCreateSample(&pSample);
    }
    if (SUCCEEDED(hr))
    {
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamps, duration, and sample flags.
    if (SUCCEEDED(hr))
    {
        hr = pSample->SetSampleTime(m_rtCurrentPosition);
    }
    if (SUCCEEDED(hr))
    {
        duration = AudioDurationFromBufferSize(m_pRiff->Format(), cbBuffer);
        hr = pSample->SetSampleDuration(duration);
    }

    // Set the discontinuity flag.
    if (SUCCEEDED(hr))
    {
        if (m_discontinuity)
        {
            hr = pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
        }
    }

    if (SUCCEEDED(hr))
    {
        // Update our current position.
        m_rtCurrentPosition += duration;

        // Give the pointer to the caller.
        *ppSample = pSample;
        (*ppSample)->AddRef();
    }

    // Error-path cleanup: the buffer is still locked only if ReadDataFromChunk
    // (or a later step) failed after a successful Lock.
    // BUGFIX: do not assign Unlock's result to hr — doing so could overwrite
    // the real failure code with S_OK and report false success to the caller.
    if (pData && pBuffer)
    {
        pBuffer->Unlock();
    }

    SafeRelease(&pBuffer);
    SafeRelease(&pSample);
    return hr;
}
void MfVideoEncoder::WriteFrame(uint8_t* data, int stride) { CComPtr<IMFSample> pSample; CComPtr<IMFMediaBuffer> pBuffer; const LONG cbWidth = 4 * mWidth; const DWORD cbBuffer = cbWidth * mHeight; BYTE *pData = NULL; // Create a new memory buffer. HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer); // Lock the buffer and copy the video frame to the buffer. if (SUCCEEDED(hr)) { hr = pBuffer->Lock(&pData, NULL, NULL); } if (SUCCEEDED(hr)) { hr = MFCopyImage( pData, // Destination buffer. cbWidth, // Destination stride. (BYTE*)data, // First row in source image. stride, // Source stride. cbWidth, // Image width in bytes. mHeight // Image height in pixels. ); } if (pBuffer) { pBuffer->Unlock(); } // Set the data length of the buffer. if (SUCCEEDED(hr)) { hr = pBuffer->SetCurrentLength(cbBuffer); } // Create a media sample and add the buffer to the sample. if (SUCCEEDED(hr)) { hr = MFCreateSample(&pSample); } if (SUCCEEDED(hr)) { hr = pSample->AddBuffer(pBuffer); } // Set the time stamp and the duration. if (SUCCEEDED(hr)) { hr = pSample->SetSampleTime(mCurrentTime); } if (SUCCEEDED(hr)) { hr = pSample->SetSampleDuration(mFrameTime); } // Send the sample to the Sink Writer. if (SUCCEEDED(hr)) { hr = mSinkWriter->WriteSample(mStreamIndex, pSample); } if (!SUCCEEDED(hr)) { throw TempleException("Unable to write video frame: {}", hr); } mCurrentTime += mFrameTime; }
void EncodeTransform::Init(int width, int height) { VTUNE_TASK(g_pDomain, "Encoder Init"); mStreamHeight = height; mStreamWidth = width; mCompressedBuffer = new DWORD[(width*height) / 2]; HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); if (SUCCEEDED(hr) || (FAILED(hr) && hr == RPC_E_CHANGED_MODE)) { hr = FindEncoder(MFVideoFormat_H264); if (FAILED(hr)) { std::cout << "failed to find/Create specified encoder mft" << std::endl; } // set the media output info hr = SetOutputMediaType(); if (FAILED(hr)) { std::cout << "failed to SetOutputMediaType " << std::endl; } // set the media input info hr = SetInputMediaType(); if (FAILED(hr)) { std::cout << "failed to SetInputMediaType" << std::endl; } // query media input stream info hr = QueryInputStreamInfo(); if (FAILED(hr)) { std::cout << "failed to QueryInputStreamInfo" << std::endl; } { //used to be NUM_PIXELS_YUY2 * 4 HRESULT hr = MFCreateMemoryBuffer((mStreamWidth*mStreamHeight) * 2, &mpInputBuffer); if (FAILED(hr)) { std::cout << "Failed to MFCreateMemoryBuffer" << std::endl; } hr = MFCreateSample(&pSampleProcIn); if (FAILED(hr)) { std::cout << "Failed to MFCreateSample" << std::endl; } hr = pSampleProcIn->AddBuffer(mpInputBuffer); if (FAILED(hr)) { std::cout << "Failed to AddBuffer to sample" << std::endl; } } { //used to be NUM_PIXELS_YUY2 * 4 hr = MFCreateMemoryBuffer(mStreamWidth * mStreamWidth * 2, &mpEncodedBuffer); if (FAILED(hr)) { std::cout << "Failed to MFCreateMemoryBuffer" << std::endl; } hr = MFCreateSample(&pSampleProcOut); if (FAILED(hr)) { std::cout << "Failed to MFCreateSample" << std::endl; } hr = pSampleProcOut->AddBuffer(mpEncodedBuffer); if (FAILED(hr)) { std::cout << "Failed to AddBuffer to sample" << std::endl; } } } }
// Opens an MP4 sink writer for H.264 video (and optionally AAC audio),
// negotiates the input/output media types, and pre-allocates the reusable
// RGB32 frame sample/buffer pair (_Sample/_Buffer).
//
// Filename:         output file path.
// Width/Height:     frame dimensions (H.264 caps out at 1920x1080 here).
// BitRate:          target H.264 bit rate.
// FrameRate:        frames per second.
// AudioDeviceIndex: capture device, or 0xFFFFFFFF to disable audio.
// Timer:            clock used elsewhere for timestamping.
// Returns VideoCompressorResultFailure only when the video input type is
// rejected; other failures trip PersistentAssert.
VideoCompressorResult VideoCompressor::OpenFile(const String &Filename, UINT Width, UINT Height, UINT BitRate, UINT FrameRate, UINT AudioDeviceIndex, Clock *Timer)
{
    VideoCompressorResult Result = VideoCompressorResultSuccess;
    _Width = Width;
    _Height = Height;
    _CapturingAudio = (AudioDeviceIndex != 0xFFFFFFFF);
    _Clock = Timer;

    // COM may already be initialized by the host app, so the assert stays off.
    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE);
    //PersistentAssert(SUCCEEDED(hr), "CoInitializeEx failed");

    hr = MFStartup(MF_VERSION);
    PersistentAssert(SUCCEEDED(hr), "MFStartup failed");

    hr = MFCreateSinkWriterFromURL( UnicodeString(Filename).CString(), NULL, NULL, &_Writer );
    PersistentAssert(SUCCEEDED(hr), "MFCreateSinkWriterFromURL failed");

    // One uncompressed RGB32 frame.
    const UINT RawBufferSize = Width * Height * 4;

    // Output (compressed) and input (raw) video types.
    // NOTE(review): MFCreateMediaType results are unchecked here.
    IMFMediaType *OutputMediaType;
    MFCreateMediaType(&OutputMediaType);
    InitMediaType(OutputMediaType, MFVideoFormat_H264, BitRate, Width, Height, FrameRate);

    IMFMediaType *InputMediaType;
    MFCreateMediaType(&InputMediaType);
    InitMediaType(InputMediaType, MFVideoFormat_RGB32, RawBufferSize, Width, Height, FrameRate);

    DWORD VideoStreamIndex;
    hr = _Writer->AddStream(OutputMediaType, &VideoStreamIndex);
    PersistentAssert(SUCCEEDED(hr), "AddStream failed");
    OutputMediaType->Release();

    /*hr = MFTRegisterLocalByCLSID( __uuidof(CColorConvertDMO), MFT_CATEGORY_VIDEO_PROCESSOR, L"", MFT_ENUM_FLAG_SYNCMFT, 0, NULL, 0, NULL ); PersistentAssert(SUCCEEDED(hr), "MFTRegisterLocalByCLSID failed");*/

    hr = _Writer->SetInputMediaType(VideoStreamIndex, InputMediaType, NULL);
    InputMediaType->Release();
    if(FAILED(hr))
    {
        // The writer rejected RGB32 at this size — most commonly because the
        // dimensions exceed the encoder's H.264 limit.
        if(Width > 1920 || Height > 1080)
        {
            MessageBox(NULL, "The maximum resolution for H.264 video is 1920x1080.", "Invalid Window Dimensions", MB_OK | MB_ICONERROR);
        }
        else
        {
            MessageBox(NULL, "There was an error when attempting to initialize video capture. 
The maximum resolution for H.264 video is 1920x1080.", "Invalid Window Dimensions", MB_OK | MB_ICONERROR);
        }
        _Writer->Release();
        _Writer = NULL;
        _Clock = NULL;
        return VideoCompressorResultFailure;
    }

    if(_CapturingAudio)
    {
        //
        // Setup the output media type (AAC, 44.1 kHz, stereo, 16-bit)
        //
        IMFMediaType *OutputAudioType;
        hr = MFCreateMediaType( &OutputAudioType );
        PersistentAssert(SUCCEEDED(hr), "MFCreateMediaType failed");

        const UINT SamplesPerSecond = 44100;
        const UINT AverageBytesPerSecond = 24000;
        const UINT ChannelCount = 2;
        const UINT BitsPerSample = 16;

        // NOTE(review): SetGUID/SetUINT32 return values are ignored throughout.
        OutputAudioType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio ) ;
        OutputAudioType->SetGUID( MF_MT_SUBTYPE, MFAudioFormat_AAC ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, SamplesPerSecond ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, ChannelCount ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, AverageBytesPerSecond ) ;
        OutputAudioType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, 1 ) ;
        //OutputAudioType->SetUINT32( MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 0x29 ) ;

        DWORD AudioStreamIndex;
        hr = _Writer->AddStream( OutputAudioType, &AudioStreamIndex );
        PersistentAssert(SUCCEEDED(hr), "AddStream failed");

        //
        // Setup the input media type (raw PCM from the capture device)
        //
        IMFMediaType *InputAudioType;
        MFCreateMediaType( &InputAudioType );
        InputAudioType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
        InputAudioType->SetGUID( MF_MT_SUBTYPE, MFAudioFormat_PCM );
        InputAudioType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, BitsPerSample );
        InputAudioType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, SamplesPerSecond );
        InputAudioType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, ChannelCount );

        hr = _Writer->SetInputMediaType( AudioStreamIndex, InputAudioType, NULL );
        PersistentAssert(SUCCEEDED(hr), "SetInputMediaType failed");

        _AudioCapture.StartCapture(this, AudioDeviceIndex);
    }

    hr = _Writer->BeginWriting();
    PersistentAssert(SUCCEEDED(hr), "BeginWriting failed");

    // Pre-allocate the reusable video sample and its backing buffer.
    hr = MFCreateSample(&_Sample);
    PersistentAssert(SUCCEEDED(hr), "MFCreateSample failed");
    // NOTE(review): this hr is never checked — a failed allocation would
    // null-deref on the next two calls.
    hr = MFCreateMemoryBuffer(RawBufferSize, &_Buffer);
    _Buffer->SetCurrentLength(RawBufferSize);
    _Sample->AddBuffer(_Buffer);

    return Result;
}
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime) { //double TimeInSeconds = _Clock->Elapsed(); const UINT SamplesPerSecond = 44100; const UINT ChannelCount = 2; const UINT SampleCount = FrameCount * ChannelCount; const UINT BitsPerSample = 16; const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount; const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns // // Write some data // IMFSample *spSample; IMFMediaBuffer *spBuffer; BYTE *pbBuffer = NULL; // // Create a media sample // HRESULT hr = MFCreateSample( &spSample ); hr = spSample->SetSampleDuration( SampleDuration ); //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) ); //CaptureStartTime = 10,000,000 * t / f; //t = CaptureStartTime * f / 10,000,000 LONGLONG FileStartCounter = _Clock->StartTime(); LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000); hr = spSample->SetSampleTime( ( CaptureStartCounter - FileStartCounter ) * LONGLONG(10000000) / _Clock->TicksPerSecond() ); // // Add a media buffer filled with random data // hr = MFCreateMemoryBuffer( BufferLength, &spBuffer ); hr = spBuffer->SetCurrentLength( BufferLength ); hr = spSample->AddBuffer( spBuffer ); hr = spBuffer->Lock( &pbBuffer, NULL, NULL ); __int16 *OutputAudioBuffer = (__int16 *)pbBuffer; for(UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++) { // // Floats are in the range -1 to 1 // OutputAudioBuffer[SampleIndex] = int(Samples[SampleIndex] * 32768.0f); } hr = spBuffer->Unlock(); // // Write the media sample // hr = _Writer->WriteSample( 1, spSample ); PersistentAssert(SUCCEEDED(hr), "WriteSample failed"); spSample->Release(); spBuffer->Release(); }
// live555 FramedSource hook: pulls one frame from the source reader, pushes
// it through the H.264 encoder MFT, and delivers any encoded output to the
// downstream consumer (fTo). If no frame was delivered, re-arms the event
// trigger so live555 calls us again.
virtual void doGetNextFrame()
{
    // Lazy one-shot initialisation of the capture device.
    if (!_isInitialised)
    {
        _isInitialised = true;
        if (!initialise())
        {
            printf("Video device initialisation failed, stopping.");
            return;
        }
    }

    if (!isCurrentlyAwaitingData()) return;

    DWORD processOutputStatus = 0;
    IMFSample *videoSample = NULL;
    DWORD streamIndex, flags;
    LONGLONG llVideoTimeStamp, llSampleDuration;
    HRESULT mftProcessInput = S_OK;
    HRESULT mftProcessOutput = S_OK;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *mftOutSample = NULL;
    DWORD mftOutFlags;
    bool frameSent = false;

    CHECK_HR(_videoReader->ReadSample(
        MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,                              // Flags.
        &streamIndex,                   // Receives the actual stream index.
        &flags,                         // Receives status flags.
        &llVideoTimeStamp,              // Receives the time stamp.
        &videoSample                    // Receives the sample or NULL.
        ), "Error reading video sample.");

    if (videoSample)
    {
        _frameCount++;

        CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
        CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

        // Pass the video sample to the H.264 transform.
        CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The resampler H264 ProcessInput call failed.\n");

        CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

        // NOTE(review): this is an equality test against a flags value;
        // it only works if MFT_OUTPUT_STATUS_SAMPLE_READY is the sole flag
        // the MFT ever sets — a bitwise AND would be the robust form.
        if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
        {
            printf("Sample ready.\n");

            // Caller-allocated output: the sample/buffer we create here is
            // handed to ProcessOutput to be filled.
            CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");
            CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
            CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
            CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add sample to buffer.\n");

            // NOTE(review): the unconditional break at the bottom means this
            // loop runs at most once per call — at most one encoded sample
            // is drained per captured frame.
            while (true)
            {
                _outputDataBuffer.dwStreamID = 0;
                _outputDataBuffer.dwStatus = 0;
                _outputDataBuffer.pEvents = NULL;
                _outputDataBuffer.pSample = mftOutSample;

                mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

                if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
                {
                    // Restamp the encoded sample with the capture timing.
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

                    IMFMediaBuffer *buf = NULL;
                    DWORD bufLength;
                    CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
                    CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");

                    BYTE * rawBuffer = NULL;

                    auto now = GetTickCount();
                    printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n", _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

                    // Hand the encoded bytes to live555.
                    fFrameSize = bufLength;
                    fDurationInMicroseconds = 0;
                    gettimeofday(&fPresentationTime, NULL);

                    buf->Lock(&rawBuffer, NULL, NULL);
                    memmove(fTo, rawBuffer, fFrameSize);

                    FramedSource::afterGetting(this);

                    buf->Unlock();
                    SafeRelease(&buf);

                    frameSent = true;
                    _lastSendAt = GetTickCount();
                }

                SafeRelease(&pBuffer);
                SafeRelease(&mftOutSample);

                break;
            }
        }
        else {
            printf("No sample.\n");
        }

        SafeRelease(&videoSample);
    }

    if (!frameSent)
    {
        // No data delivered this pass: schedule ourselves to be polled again.
        envir().taskScheduler().triggerEvent(eventTriggerId, this);
    }

    return;

done:
    // CHECK_HR failure path.
    printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
}
// Pass-through MFT output: delivers the queued input sample (m_sample) to
// the sink. Two paths: copy the data into the sink-provided sample, or (for
// zero-length DXVA samples) hand the sink our sample directly.
STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount, MFT_OUTPUT_DATA_BUFFER *pOutputSamples, DWORD *pdwStatus)
{
    // Standard MFT parameter validation: exactly one output buffer, no flags.
    if (dwFlags != 0)
        return E_INVALIDARG;
    if (pOutputSamples == NULL || pdwStatus == NULL)
        return E_POINTER;
    if (cOutputBufferCount != 1)
        return E_INVALIDARG;

    QMutexLocker locker(&m_mutex);
    if (!m_sample)
        return MF_E_TRANSFORM_NEED_MORE_INPUT;

    IMFMediaBuffer *input = NULL;
    IMFMediaBuffer *output = NULL;

    DWORD sampleLength = 0;
    m_sample->GetTotalLength(&sampleLength);

    // If the sample length is null, it means we're getting DXVA buffers.
    // In that case just pass on the sample we got as input.
    // Otherwise we need to copy the input buffer into the buffer the sink
    // is giving us.
    if (pOutputSamples[0].pSample && sampleLength > 0) {
        if (FAILED(m_sample->ConvertToContiguousBuffer(&input)))
            goto done;
        if (FAILED(pOutputSamples[0].pSample->ConvertToContiguousBuffer(&output)))
            goto done;

        DWORD inputLength = 0;
        DWORD outputLength = 0;
        input->GetMaxLength(&inputLength);
        output->GetMaxLength(&outputLength);

        // The sink's buffer is too small: replace it with one big enough.
        if (outputLength < inputLength) {
            pOutputSamples[0].pSample->RemoveAllBuffers();
            output->Release();
            output = NULL;
            if (SUCCEEDED(MFCreateMemoryBuffer(inputLength, &output)))
                pOutputSamples[0].pSample->AddBuffer(output);
        }

        if (output)
            m_sample->CopyToBuffer(output);

        // Propagate timing only when the input sample carries it.
        LONGLONG hnsDuration = 0;
        LONGLONG hnsTime = 0;
        if (SUCCEEDED(m_sample->GetSampleDuration(&hnsDuration)))
            pOutputSamples[0].pSample->SetSampleDuration(hnsDuration);
        if (SUCCEEDED(m_sample->GetSampleTime(&hnsTime)))
            pOutputSamples[0].pSample->SetSampleTime(hnsTime);
    } else {
        // DXVA path: swap the sink's sample (if any) for ours. The AddRef
        // balances the unconditional m_sample->Release() below, so the
        // sink ends up owning one reference.
        if (pOutputSamples[0].pSample)
            pOutputSamples[0].pSample->Release();
        pOutputSamples[0].pSample = m_sample;
        pOutputSamples[0].pSample->AddRef();
    }

done:
    pOutputSamples[0].dwStatus = 0;
    *pdwStatus = 0;
    // The queued input sample is consumed either way (even on the error
    // paths that jump here), so the next call reports NEED_MORE_INPUT.
    m_sample->Release();
    m_sample = 0;
    if (input)
        input->Release();
    if (output)
        output->Release();
    return S_OK;
}