Example #1
//-------------------------------------------------------------------
// Write the sample 
//
HRESULT VidWriter::writeFrame(BYTE *pData)
{
	HRESULT hr;
    IMFSample *pSample = NULL;
    const DWORD cbBuffer = 4 * m_width * m_height;

	// Unlock the buffer
    if (m_pBuffer) m_pBuffer->Unlock();	

	// Set the data length of the buffer
    hr = m_pBuffer->SetCurrentLength(cbBuffer);
	if (FAILED(hr)) goto done;	

    // Create a media sample and add the buffer to it
    hr = MFCreateSample(&pSample);
	if (FAILED(hr)) goto done;
	hr = pSample->AddBuffer(m_pBuffer);
	if (FAILED(hr)) goto done;

    // Set the time stamp and the duration
    hr = pSample->SetSampleTime(m_rtStart);
	if (FAILED(hr)) goto done;
	hr = pSample->SetSampleDuration(m_frametime);
	if (FAILED(hr)) goto done;

	// increment the time stamp
	m_rtStart += m_frametime;

    // Send the sample to the Sink Writer
    hr = m_pWriter->WriteSample(m_streamIndex, pSample);

done:
    SafeRelease(&pSample);
    return hr;
}
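Example #1 appears to assume that m_pBuffer was created, locked and filled with the frame data elsewhere before writeFrame is called, which is why the function only unlocks it. It also uses the SafeRelease helper that appears throughout these examples; this is the small COM release template from the Media Foundation samples, roughly:

template <class T> void SafeRelease(T **ppT)
{
    // Release the COM interface (if any) and clear the caller's pointer.
    if (*ppT)
    {
        (*ppT)->Release();
        *ppT = NULL;
    }
}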
Example #2
HRESULT VideoEncoder::WriteTransitionSample(UINT64 sampleDuration, TransitionBase* pTransition, DWORD streamIndex, LONGLONG* startTime)
{
	HRESULT hr = S_OK;
	IMFMediaBuffer* pMediaBuffer = nullptr;
	BYTE* pFrameBuffer = nullptr;
	IMFSample* pSample = nullptr;
	BYTE* pOutputFrame = nullptr;

	for (DWORD i = 0; i < sampleDuration; i++)
    {
		CheckHR(MFCreateMemoryBuffer(this->m_frameBufferSize, &pMediaBuffer));
		pMediaBuffer->Lock(&pFrameBuffer, nullptr, nullptr);
		float time = (float)i / (float)sampleDuration;
		pOutputFrame = pTransition->GetOutputFrame(time);
		CheckHR(MFCopyImage(pFrameBuffer, this->m_frameStride, pOutputFrame, this->m_frameStride, this->m_frameStride, this->m_frameHeight));
		CheckHR(pMediaBuffer->Unlock());
		CheckHR(pMediaBuffer->SetCurrentLength(this->m_frameBufferSize));
		CheckHR(MFCreateSample(&pSample));
		CheckHR(pSample->AddBuffer(pMediaBuffer));
		CheckHR(pSample->SetSampleTime(*startTime));
		CheckHR(pSample->SetSampleDuration(this->GetFrameDuration()));
		CheckHR(this->m_pSinkWriter->WriteSample(streamIndex, pSample));
		(*startTime) += this->GetFrameDuration();

		// Release the per-frame sample resources.
		SafeRelease(&pMediaBuffer);
		SafeRelease(&pSample);
		if (pOutputFrame != nullptr)
		{
			delete pOutputFrame;
			pOutputFrame = nullptr;
		}
	}

cleanup:
	if (!SUCCEEDED(hr))
	{
		DWORD error = GetLastError();
		this->m_logFileStream << "Unexpected error: " << error << endl;
	}
	SafeRelease(&pMediaBuffer);
	SafeRelease(&pSample);
	if (pOutputFrame != nullptr)
	{
		delete pOutputFrame;
		pOutputFrame = nullptr;
	}
	return hr;
}
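Example #2 relies on a CheckHR macro and the cleanup: label shown above, but the macro itself is not part of the snippet. One plausible definition (an assumption, not the original project's code) stores the result in hr and jumps to cleanup on failure:

// Hypothetical error-handling macro assumed by Example #2:
// keep the HRESULT in hr and bail out to the cleanup label on failure.
#define CheckHR(x)          \
    do {                    \
        hr = (x);           \
        if (FAILED(hr))     \
            goto cleanup;   \
    } while (0)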
Example #3
// Process the incoming NAL from the queue: wraps it up into an
// IMFMediaSample and sends it to the decoder.
//
// Thread context: decoder thread
bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket* nalPacket)
{
	bool				ret = false;
	HRESULT				hr;
	IMFMediaBuffer*		newBuffer = NULL;
	BYTE*				newBufferPtr;
	void*				nalPacketPtr;
	//
	IMFSample*			newSample = NULL;
	ULONGLONG			nalPresentationTime;
	const BYTE			nalPrefix[] = {0, 0, 0, 1};

	// Get a pointer to the NAL data
	hr = nalPacket->GetBytes(&nalPacketPtr);
	if (FAILED(hr))
		goto bail;

	// Create the MF media buffer (+ 4 bytes for the NAL prefix (0x00 0x00 0x00 0x01)
	// which MF requires).
	hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize()+4, &newBuffer);
	if (FAILED(hr))
		goto bail;

	// Lock the MF media buffer
	hr = newBuffer->Lock(&newBufferPtr, NULL, NULL);
	if (FAILED(hr))
		goto bail;

	// Copy the prefix and the data
	memcpy(newBufferPtr, nalPrefix, 4);
	memcpy(newBufferPtr+4, nalPacketPtr, nalPacket->GetPayloadSize());

	// Unlock the MF media buffer
	hr = newBuffer->Unlock();
	if (FAILED(hr))
		goto bail;

	// Update the current length of the MF media buffer
	hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize()+4);
	if (FAILED(hr))
		goto bail;

	// We now have a IMFMediaBuffer with the contents of the NAL
	// packet. We now construct a IMFSample with the buffer
	hr = MFCreateSample(&newSample);
	if (FAILED(hr))
		goto bail;

	hr = newSample->AddBuffer(newBuffer);
	if (FAILED(hr))
		goto bail;

	// Get the presentation (display) time in 100-nanosecond units
	// TODO: this is pretty meaningless without setting the start time.
	hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Set presentation time on the sample
	hr = newSample->SetSampleTime(nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Now pass it to the decoder
	for (;;)
	{
		hr = m_h264Decoder->ProcessInput(0, newSample, 0);
		if (hr == S_OK)
			break;
		if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false)
			goto bail;
	}

	ret = true;

bail:
	if (newBuffer != NULL)
		newBuffer->Release();

	if (newSample != NULL)
		newSample->Release();

	return ret;
}
Example #4
HRESULT WavStream::CreateAudioSample(IMFSample **ppSample)
{
    HRESULT hr = S_OK;

    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *pSample = NULL;

    DWORD       cbBuffer = 0;
    BYTE        *pData = NULL;
    LONGLONG    duration = 0;

    // Start with one second of data, rounded up to the nearest block.
    cbBuffer = AlignUp<DWORD>(m_pRiff->Format()->nAvgBytesPerSec, m_pRiff->Format()->nBlockAlign);

    // Don't request any more than what's left.
    cbBuffer = min(cbBuffer, m_pRiff->BytesRemainingInChunk());

    // Create the buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Get a pointer to the buffer memory.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    // Fill the buffer
    if (SUCCEEDED(hr))
    {   
        hr = m_pRiff->ReadDataFromChunk(pData, cbBuffer);
    }

    // Unlock the buffer.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->Unlock();
        pData = NULL;
    }

    // Set the size of the valid data in the buffer.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a new sample and add the buffer to it.
    if (SUCCEEDED(hr))
    {   
        hr = MFCreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {   
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamps, duration, and sample flags.
    if (SUCCEEDED(hr))
    {   
        hr = pSample->SetSampleTime(m_rtCurrentPosition);
    }

    if (SUCCEEDED(hr))
    {   
        duration = AudioDurationFromBufferSize(m_pRiff->Format(), cbBuffer);
        hr = pSample->SetSampleDuration(duration);
    }

    // Set the discontinuity flag.
    if (SUCCEEDED(hr))
    {   
        if (m_discontinuity)
        {
            hr = pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
        }
    }

    if (SUCCEEDED(hr))
    {   
        // Update our current position.
        m_rtCurrentPosition += duration;

        // Give the pointer to the caller.
        *ppSample = pSample;
        (*ppSample)->AddRef();
    }

    if (pData && pBuffer)
    {
        hr = pBuffer->Unlock();
    }

    SafeRelease(&pBuffer);
    SafeRelease(&pSample);
    return hr;
}
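Example #4 closely follows the Microsoft WavSource sample and uses two helpers that are not reproduced above: AlignUp rounds the buffer size up to a multiple of the block alignment, and AudioDurationFromBufferSize converts a byte count into a duration in 100-nanosecond units. Sketches of both, along the lines of the SDK sample:

template <class T>
T AlignUp(T num, T mult)
{
    // Round num up to the next multiple of mult (mult must be > 0).
    T tmp = num + mult - 1;
    return tmp - (tmp % mult);
}

LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize)
{
    // Duration, in 100-ns units, represented by cbAudioDataSize bytes of PCM audio.
    if (pWav->nAvgBytesPerSec == 0)
    {
        return 0;
    }
    return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
}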
Example #5
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime)
{
    //double TimeInSeconds = _Clock->Elapsed();

    const UINT SamplesPerSecond = 44100;
    const UINT ChannelCount = 2;
    const UINT SampleCount = FrameCount * ChannelCount;
    const UINT BitsPerSample = 16;
    const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount;
    const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns
    
    //
    // Write some data
    //
    IMFSample *spSample;
    IMFMediaBuffer *spBuffer;
    BYTE *pbBuffer = NULL;

    //
    // Create a media sample
    //

    HRESULT hr = MFCreateSample( &spSample );
    hr = spSample->SetSampleDuration( SampleDuration );
    
    //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) );
    
    //CaptureStartTime = 10,000,000 * t / f;
    //t = CaptureStartTime * f / 10,000,000
    LONGLONG FileStartCounter = _Clock->StartTime();
    LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000);
    hr = spSample->SetSampleTime( ( CaptureStartCounter - FileStartCounter ) * LONGLONG(10000000) / _Clock->TicksPerSecond() );

    //
    // Add a media buffer and fill it with the converted 16-bit audio data
    //

    hr = MFCreateMemoryBuffer( BufferLength, &spBuffer );
    hr = spBuffer->SetCurrentLength( BufferLength );
    hr = spSample->AddBuffer( spBuffer );

    hr = spBuffer->Lock( &pbBuffer, NULL, NULL );
    __int16 *OutputAudioBuffer = (__int16 *)pbBuffer;
    for(UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++)
    {
        //
        // Floats are in the range -1 to 1
        //
        OutputAudioBuffer[SampleIndex] = int(Samples[SampleIndex] * 32768.0f);
    }
    hr = spBuffer->Unlock();

    //
    // Write the media sample
    //
    hr = _Writer->WriteSample( 1, spSample );
    PersistentAssert(SUCCEEDED(hr), "WriteSample failed");

    spSample->Release();
    spBuffer->Release();
}
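One detail worth noting in Example #5: multiplying by 32768.0f overflows a signed 16-bit sample when the input float reaches +1.0. A clamped conversion, shown here as a hypothetical helper that is not part of the original code, avoids the wrap-around:

// Hypothetical clamped float -> 16-bit PCM conversion.
inline __int16 FloatSampleToPcm16(float s)
{
    float scaled = s * 32767.0f;
    if (scaled >  32767.0f) scaled =  32767.0f;
    if (scaled < -32768.0f) scaled = -32768.0f;
    return (__int16)scaled;
}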
Example #6
	virtual void doGetNextFrame()
	{
		if (!_isInitialised)
		{
			_isInitialised = true;
			if (!initialise())
			{
				printf("Video device initialisation failed, stopping.");
				return;
			}
		}

		if (!isCurrentlyAwaitingData()) return;

		DWORD processOutputStatus = 0;
		IMFSample *videoSample = NULL;
		DWORD streamIndex, flags;
		LONGLONG llVideoTimeStamp, llSampleDuration;
		HRESULT mftProcessInput = S_OK;
		HRESULT mftProcessOutput = S_OK;
		MFT_OUTPUT_STREAM_INFO StreamInfo;
		IMFMediaBuffer *pBuffer = NULL;
		IMFSample *mftOutSample = NULL;
		DWORD mftOutFlags;
		bool frameSent = false;

		CHECK_HR(_videoReader->ReadSample(
			MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			0,                              // Flags.
			&streamIndex,                   // Receives the actual stream index. 
			&flags,                         // Receives status flags.
			&llVideoTimeStamp,              // Receives the time stamp.
			&videoSample                    // Receives the sample or NULL.
			), "Error reading video sample.");

		if (videoSample)
		{
			_frameCount++;

			CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
			CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

			// Pass the video sample to the H.264 transform.

			CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The H264 encoder ProcessInput call failed.\n");

			CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

			if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
			{
				printf("Sample ready.\n");

				CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");

				CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
				CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
				CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add buffer to MF sample.\n");

				while (true)
				{
					_outputDataBuffer.dwStreamID = 0;
					_outputDataBuffer.dwStatus = 0;
					_outputDataBuffer.pEvents = NULL;
					_outputDataBuffer.pSample = mftOutSample;

					mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

					if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
					{
						CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
						CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

						IMFMediaBuffer *buf = NULL;
						DWORD bufLength;
						CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
						CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");
						BYTE * rawBuffer = NULL;

						auto now = GetTickCount();

						printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n", _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

						fFrameSize = bufLength;
						fDurationInMicroseconds = 0;
						gettimeofday(&fPresentationTime, NULL);

						buf->Lock(&rawBuffer, NULL, NULL);
						memmove(fTo, rawBuffer, fFrameSize);

						FramedSource::afterGetting(this);

						buf->Unlock();
						SafeRelease(&buf);

						frameSent = true;
						_lastSendAt = GetTickCount();
					}

					SafeRelease(&pBuffer);
					SafeRelease(&mftOutSample);

					break;
				}
			}
			else {
				printf("No sample.\n");
			}

			SafeRelease(&videoSample);
		}

		if (!frameSent)
		{
			envir().taskScheduler().triggerEvent(eventTriggerId, this);
		}

		return;

	done:

		printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
	}
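Example #6 depends on a CHECK_HR macro and the done: label at the end of the function, with the macro defined elsewhere in that project. A plausible definition (an assumption) evaluates the call once, logs the message, and jumps to done on failure:

// Hypothetical error-handling macro assumed by Example #6.
#define CHECK_HR(hr, msg)                            \
    {                                                \
        HRESULT hr_ = (hr);                          \
        if (FAILED(hr_))                             \
        {                                            \
            printf("%s hr: 0x%08lx\n", msg, hr_);    \
            goto done;                               \
        }                                            \
    }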
Example #7
STDMETHODIMP CDecWMV9MFT::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pMediaSample)
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  hr = m_pMFT->GetInputStatus(0, &dwStatus);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> GetInputStatus() failed with hr: 0x%x", hr));
    return S_FALSE;
  }
  if (!(dwStatus & MFT_INPUT_STATUS_ACCEPT_DATA))
    return S_FALSE;

  if (m_vc1Header && (m_bManualReorder || m_bNeedKeyFrame)) {
    AVPictureType pictype = m_vc1Header->ParseVC1PictureType(buffer, buflen);
    if (m_bManualReorder) {
      if (pictype == AV_PICTURE_TYPE_I || pictype == AV_PICTURE_TYPE_P) {
        if (m_bReorderBufferValid)
          m_timestampQueue.push(m_rtReorderBuffer);
        m_rtReorderBuffer = rtStart;
        m_bReorderBufferValid = TRUE;
      } else {
        m_timestampQueue.push(rtStart);
      }
    }

    if (m_bNeedKeyFrame) {
      if (pictype != AV_PICTURE_TYPE_I) {
        if (m_bManualReorder)
          m_timestampQueue.pop();
        return S_OK;
      } else {
        m_bNeedKeyFrame = FALSE;
        bSyncPoint = TRUE;
      }
    }
  }

  IMFSample *pSample = nullptr;
  hr = MF.CreateSample(&pSample);
  if (FAILED(hr)) {
    DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Sample, hr: 0x%x", hr));
    return E_FAIL;
  }
  
  IMFMediaBuffer *pMFBuffer = CreateMediaBuffer(buffer, buflen);
  if (!pMFBuffer) {
    DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
    SafeRelease(&pSample);
    return E_FAIL;
  }

  pSample->AddBuffer(pMFBuffer);

  if (rtStart != AV_NOPTS_VALUE) {
    pSample->SetSampleTime(rtStart);
    if (rtStop != AV_NOPTS_VALUE && rtStop > (rtStart - 1))
      pSample->SetSampleDuration(rtStop - rtStart);
  }

  pSample->SetUINT32(MFSampleExtension_CleanPoint,    bSyncPoint);
  pSample->SetUINT32(MFSampleExtension_Discontinuity, bDiscontinuity);

  hr = m_pMFT->ProcessInput(0, pSample, 0);

  if (hr == MF_E_NOTACCEPTING) {
    // Not accepting data right now, try to process output and try again
    ProcessOutput();
    hr = m_pMFT->ProcessInput(0, pSample, 0);
  }

  SafeRelease(&pMFBuffer);
  SafeRelease(&pSample);

  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessInput failed with hr: 0x%x", hr));
    return E_FAIL;
  }

  return ProcessOutput();
}
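Example #7 calls a project-specific CreateMediaBuffer helper whose implementation is not shown (and the real LAV Filters version may differ, e.g. with padding or alignment). A minimal sketch of what such a helper could do, copying the compressed frame into a freshly allocated IMFMediaBuffer:

// Minimal sketch only, not the LAV Filters implementation. Assumes the MF
// headers (mfapi.h) are already included by the surrounding code.
static IMFMediaBuffer *CreateMediaBufferSketch(const BYTE *buffer, DWORD buflen)
{
  IMFMediaBuffer *pBuffer = nullptr;
  BYTE *pData = nullptr;
  if (FAILED(MFCreateMemoryBuffer(buflen, &pBuffer)))
    return nullptr;
  if (SUCCEEDED(pBuffer->Lock(&pData, nullptr, nullptr)))
  {
    // Copy the compressed frame and record its valid length.
    memcpy(pData, buffer, buflen);
    pBuffer->Unlock();
    pBuffer->SetCurrentLength(buflen);
    return pBuffer;
  }
  SafeRelease(&pBuffer);
  return nullptr;
}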