void D3DPresentEngine::presentSample(void *opaque, qint64)
{
    HRESULT hr = S_OK;

    IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
    IMFMediaBuffer *buffer = NULL;
    IDirect3DSurface9 *surface = NULL;

    if (m_surface && m_surface->isActive()) {
        if (sample) {
            // Get the buffer from the sample.
            hr = sample->GetBufferByIndex(0, &buffer);
            if (FAILED(hr))
                goto done;

            // Get the surface from the buffer.
            hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
            if (FAILED(hr))
                goto done;
        }

        if (surface && updateTexture(surface)) {
            QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
                                            m_surfaceFormat.frameSize(),
                                            m_surfaceFormat.pixelFormat());

            // WMF uses 100-nanosecond units, Qt uses microseconds
            LONGLONG startTime = -1;
            if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
                frame.setStartTime(startTime * 0.1);

                LONGLONG duration = -1;
                if (SUCCEEDED(sample->GetSampleDuration(&duration)))
                    frame.setEndTime((startTime + duration) * 0.1);
            }

            m_surface->present(frame);
        }
    }

done:
    qt_wmf_safeRelease(&surface);
    qt_wmf_safeRelease(&buffer);
    qt_wmf_safeRelease(&sample);
}
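The qt_wmf_safeRelease helper used in the cleanup path above is not shown. A minimal sketch of what it presumably does, following the standard COM release pattern (the actual helper in Qt's WMF plugin may differ):

// Sketch only: assumed to match the usual COM release idiom.
template <class T>
void qt_wmf_safeRelease(T **ppT)
{
    if (*ppT) {
        (*ppT)->Release();  // Drop our reference to the COM object.
        *ppT = NULL;        // Null the pointer to guard against double-release.
    }
}

Releasing through the done: label ensures the buffer, surface, and sample are freed on both the success and failure paths.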
virtual void doGetNextFrame()
{
    if (!_isInitialised) {
        _isInitialised = true;
        if (!initialise()) {
            printf("Video device initialisation failed, stopping.\n");
            return;
        }
    }

    if (!isCurrentlyAwaitingData())
        return;

    DWORD processOutputStatus = 0;
    IMFSample *videoSample = NULL;
    DWORD streamIndex, flags;
    LONGLONG llVideoTimeStamp, llSampleDuration;
    HRESULT mftProcessInput = S_OK;
    HRESULT mftProcessOutput = S_OK;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *mftOutSample = NULL;
    DWORD mftOutFlags;
    bool frameSent = false;

    CHECK_HR(_videoReader->ReadSample(
        MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,                  // Flags.
        &streamIndex,       // Receives the actual stream index.
        &flags,             // Receives status flags.
        &llVideoTimeStamp,  // Receives the time stamp.
        &videoSample        // Receives the sample or NULL.
    ), "Error reading video sample.");

    if (videoSample) {
        _frameCount++;

        CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
        CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

        // Pass the video sample to the H.264 transform.
        CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The H264 encoder ProcessInput call failed.\n");
        CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

        if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY) {
            printf("Sample ready.\n");

            CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");
            CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
            CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
            CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add buffer to sample.\n");

            // Note: the unconditional break below means at most one encoded
            // sample is drained from the MFT per call.
            while (true) {
                _outputDataBuffer.dwStreamID = 0;
                _outputDataBuffer.dwStatus = 0;
                _outputDataBuffer.pEvents = NULL;
                _outputDataBuffer.pSample = mftOutSample;

                mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

                if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT) {
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
                    CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

                    IMFMediaBuffer *buf = NULL;
                    DWORD bufLength;
                    CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
                    CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");

                    BYTE *rawBuffer = NULL;

                    auto now = GetTickCount();
                    printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n",
                           _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

                    // Hand the encoded frame to live555.
                    fFrameSize = bufLength;
                    fDurationInMicroseconds = 0;
                    gettimeofday(&fPresentationTime, NULL);

                    buf->Lock(&rawBuffer, NULL, NULL);
                    memmove(fTo, rawBuffer, fFrameSize);

                    FramedSource::afterGetting(this);

                    buf->Unlock();
                    SafeRelease(&buf);

                    frameSent = true;
                    _lastSendAt = GetTickCount();
                }

                SafeRelease(&pBuffer);
                SafeRelease(&mftOutSample);

                break;
            }
        }
        else {
            printf("No sample.\n");
        }

        SafeRelease(&videoSample);
    }

    if (!frameSent) {
        envir().taskScheduler().triggerEvent(eventTriggerId, this);
    }

    return;

done:
    printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
}
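The CHECK_HR macro and SafeRelease helper are not shown. Given the done: label at the end of the function, CHECK_HR presumably logs the message and jumps there on any failed HRESULT; SafeRelease follows the pattern from the Microsoft SDK samples. A minimal sketch under those assumptions (the originals may differ):

// Sketch only: assumed behavior, inferred from the goto-style error handling above.
#define CHECK_HR(hr, msg) \
    if (FAILED(hr)) { printf(msg); goto done; }

// Standard COM release helper, as in the Microsoft SDK samples.
template <class T>
void SafeRelease(T **ppT)
{
    if (*ppT) {
        (*ppT)->Release();
        *ppT = NULL;
    }
}

One caveat with this error-handling style: because the done: path only prints a message, any COM interfaces acquired before a failing call are leaked on the error path.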