Example #1
HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
	//printf("Get Sample...\n");

	IMFSample *videoSample = NULL;

	// NOTE: the initial read may legitimately return a null pSample; see the flag check below.
	CHECK_HR(videoReader->ReadSample(
		MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index. 
		&flags,                         // Receives status flags.
		&llVideoTimeStamp,                   // Receives the time stamp.
		&videoSample                        // Receives the sample or NULL.
		), L"Error reading video sample.");

	if (!videoSample)
	{
		printf("Failed to get video sample from MF.\n");
		return E_FAIL;	// bail out; every path must return an HRESULT
	}
	else
	{
		DWORD nCurrBufferCount = 0;
		CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

		IMFMediaBuffer * pMediaBuffer;
		CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

		DWORD nCurrLen = 0;
		CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

		BYTE *imgBuff = NULL;
		DWORD buffCurrLen = 0;
		DWORD buffMaxLen = 0;
		CHECK_HR(pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen), L"Failed to lock the video sample buffer.\n");
		
		/*BYTE *i420 = new BYTE[4608000];
		YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
		vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/
		
		vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);
		
		const vpx_codec_cx_pkt_t * pkt = NULL;
		vpx_enc_frame_flags_t encFlags = 0;	// distinct name so it can't shadow the ReadSample status flags
		
		if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, encFlags, VPX_DL_REALTIME)) {
			printf("VPX codec failed to encode the frame.\n");
			// Clean up before bailing out so the locked buffer and sample don't leak.
			pMediaBuffer->Unlock();
			pMediaBuffer->Release();
			videoSample->Release();
			return E_FAIL;
		}
		else {
			vpx_codec_iter_t iter = NULL;

			while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter))) {
				switch (pkt->kind) {
				case VPX_CODEC_CX_FRAME_PKT:                                
					vpkt = pkt;
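					// Note: pkt points at storage owned by the encoder; it is
					// only valid until the next vpx_codec_encode()/destroy call,
					// so the caller must consume vpkt before encoding the next
					// frame.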
					break;
				default:
					break;
				}

				printf("%s %i\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
			}
		}

		_sampleCount++;

		vpx_img_free(img);

		pMediaBuffer->Unlock();
		pMediaBuffer->Release();

		//delete i420;

		videoSample->Release();

		return S_OK;
	}
}
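The listing relies on a CHECK_HR helper that isn't shown. A minimal assumed definition (not from the original source) would log the message and propagate the failed HRESULT:

// Assumed helper, not part of the original listing.
#define CHECK_HR(call, msg)              \
	do {                                 \
		HRESULT hr_ = (call);            \
		if (FAILED(hr_)) {               \
			wprintf(L"%s\n", (msg));     \
			return hr_;                  \
		}                                \
	} while (0)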
Example #2
File: wmfbase.cpp  Project: TimSC/wmfcam
DWORD SampleToStaticObj(IMFSample *pSample, char **buff)
{
	if(*buff!=NULL)
		throw runtime_error("Buff ptr should be initially null");
	IMFMediaBuffer *ppBuffer = NULL;
	HRESULT hr = pSample->ConvertToContiguousBuffer(&ppBuffer);
	//cout << "ConvertToContiguousBuffer=" << SUCCEEDED(hr) << "\tstride="<< plStride << "\n";

	IMF2DBuffer *p2DBuffer = NULL;
	ppBuffer->QueryInterface(IID_IMF2DBuffer, (void**)&p2DBuffer);
	//cout << "IMF2DBuffer=" << (p2DBuffer != NULL) << "\n";
	if(p2DBuffer) p2DBuffer->Release();	// queried only as a capability check; release it so it doesn't leak

	DWORD pcbCurrentLength = 0;
	BYTE *ppbBuffer = NULL;
	DWORD pcbMaxLength = 0;

	if(SUCCEEDED(hr))
	{
		
		hr = ppBuffer->Lock(&ppbBuffer, &pcbMaxLength, &pcbCurrentLength);
		//cout << "pcbMaxLength="<< pcbMaxLength << "\tpcbCurrentLength=" <<pcbCurrentLength << "\n";

		if(SUCCEEDED(hr))
		{
			//Copy the sample contents into a caller-owned buffer
			*buff = new char[pcbCurrentLength];
			memcpy(*buff, ppbBuffer, pcbCurrentLength);

			ppBuffer->Unlock();
		}
	}

	if(ppBuffer) ppBuffer->Release();
	return pcbCurrentLength;
}
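A hypothetical call site for SampleToStaticObj (names assumed, not part of wmfcam): the output buffer is allocated with new[] inside the function, so the caller owns it and must delete[] it.

	char *buff = NULL;	// must start out null, or the function throws
	DWORD len = SampleToStaticObj(pSample, &buff);
	if(len > 0)
	{
		// ... consume len bytes at buff ...
	}
	delete [] buff;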
// This is called each time the stream format has changed, and
// deletes and then re-creates the output sample (IMFSample*),
// according to the required format.
//
// Thread context: decoder thread
bool DecoderMF::CreateOutputSample()
{
	bool				ret = false;
	HRESULT				hr;
	MFT_OUTPUT_STREAM_INFO outputStreamInfo;
	IMFMediaBuffer*		mediaBuffer = NULL;

	if (m_outputSample != NULL)
	{
		m_outputSample->Release();
		m_outputSample = NULL;
	}

	hr = MFCreateSample(&m_outputSample);
	if (FAILED(hr))
		goto bail;

	hr = m_h264Decoder->GetOutputStreamInfo(0, &outputStreamInfo);
	if (FAILED(hr))
		goto bail;

	hr = MFCreateAlignedMemoryBuffer(outputStreamInfo.cbSize, MF_16_BYTE_ALIGNMENT, &mediaBuffer);
	if (FAILED(hr))
		goto bail;

	hr = m_outputSample->AddBuffer(mediaBuffer);
	if (FAILED(hr))
		goto bail;

	ret = true;
	
bail:
	if (ret == false)
	{
		if (m_outputSample != NULL)
		{
			m_outputSample->Release();
			m_outputSample = NULL;
		}
	}

	if (mediaBuffer)
		mediaBuffer->Release();

	return ret;
}
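One related check worth a sketch (an assumption, not part of DecoderMF): if the MFT allocates its own output samples, e.g. when DXVA is active, GetOutputStreamInfo reports it in dwFlags, and the caller should pass a null sample instead of creating one.

	MFT_OUTPUT_STREAM_INFO info;
	if (SUCCEEDED(m_h264Decoder->GetOutputStreamInfo(0, &info)) &&
		(info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES |
		                 MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES)))
	{
		// The MFT supplies its own samples; set
		// MFT_OUTPUT_DATA_BUFFER::pSample to NULL in ProcessOutput
		// instead of supplying a caller-allocated sample.
	}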
QVideoFrame MFTransform::makeVideoFrame()
{
    QVideoFrame frame;

    if (!m_format.isValid())
        return frame;

    IMFMediaBuffer *buffer = 0;

    do {
        if (FAILED(m_sample->ConvertToContiguousBuffer(&buffer)))
            break;

        QByteArray array = dataFromBuffer(buffer, m_format.frameHeight(), &m_bytesPerLine);
        if (array.isEmpty())
            break;

        // Wrapping the IMFSample or IMFMediaBuffer in a QVideoFrame is not possible, because we
        // cannot hold the IMFSample for a "long" time without affecting the rest of the topology:
        // if an IMFSample is held for more than about 5 frames, the decoder starts to reuse it
        // even though it hasn't been released yet.
        // That is why we copy the data from the IMFMediaBuffer here.
        frame = QVideoFrame(new QMemoryVideoBuffer(array, m_bytesPerLine), m_format.frameSize(), m_format.pixelFormat());

        // WMF uses 100-nanosecond units, Qt uses microseconds
        LONGLONG startTime = -1;
        if (SUCCEEDED(m_sample->GetSampleTime(&startTime))) {
            frame.setStartTime(startTime * 0.1);

            LONGLONG duration = -1;
            if (SUCCEEDED(m_sample->GetSampleDuration(&duration)))
                frame.setEndTime((startTime + duration) * 0.1);
        }
    } while (false);

    if (buffer)
        buffer->Release();

    return frame;
}
// Process any pending output from the decoder (until all output is
// processed).
//
// Thread context: decoder thread
bool DecoderMF::DoProcessOutput()
{
	bool						ret = false;
	HRESULT						hr;
	MFT_OUTPUT_DATA_BUFFER		mftDataBuffer;
	DWORD						mftStatus;
	bool						moreOutput;

	if (m_outputSample == NULL)
	{
		if (! CreateOutputSample())
			return false;
	}

	do
	{
		// Since we could be looping inside this method for a while,
		// if a whole stack of frames arrive at once, we want to exit
		// if the thread has been asked to die. So check on each
		// iteration of the loop.
		if (! m_decoderThreadRunning)
			return true;

		moreOutput = false;

		mftDataBuffer.dwStreamID = 0;
		mftDataBuffer.pSample = m_outputSample;
		mftDataBuffer.dwStatus = 0;
		mftDataBuffer.pEvents = NULL;
		mftStatus = 0;

		// Looks like we have to reset the sample before use:
		IMFMediaBuffer* mediaBuffer;
		hr = m_outputSample->GetBufferByIndex(0, &mediaBuffer);
		if (FAILED(hr))
			goto bail;

		hr = mediaBuffer->SetCurrentLength(0);
		if (FAILED(hr))
		{
			mediaBuffer->Release();	// don't leak the buffer reference on the error path
			goto bail;
		}

		mediaBuffer->Release();

		hr = m_h264Decoder->ProcessOutput(0, 1, &mftDataBuffer, &mftStatus);

		// Check return code
		if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
			break;
		EnterCriticalSection(&m_criticalSection);
		if (hr == MF_E_TRANSFORM_STREAM_CHANGE || !m_previewConfigured)
		{
			// If the output format has changed, we need to handle
			// the stream change. This will happen after the first
			// few packets have been delivered.
			moreOutput = HandleStreamChange();
			LeaveCriticalSection(&m_criticalSection);
			if (!moreOutput)
				goto bail;
			continue;
		}
		LeaveCriticalSection(&m_criticalSection);
		if (FAILED(hr))
			goto bail;

		if (mftDataBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
			moreOutput = true;

		// Process each event:
		if (mftDataBuffer.pEvents != NULL)
		{
			DWORD numElements;
			hr = mftDataBuffer.pEvents->GetElementCount(&numElements);
			if (SUCCEEDED(hr))
			{
				for (DWORD i = 0; i < numElements; i++)
				{
					IUnknown* iunk = NULL;

					hr = mftDataBuffer.pEvents->GetElement(i, &iunk);
					if (SUCCEEDED(hr))
					{
						IMFMediaEvent* mediaEvent = NULL;
						hr = iunk->QueryInterface(IID_IMFMediaEvent, (void**)&mediaEvent);

						if (SUCCEEDED(hr))
						{
							OutputDebugString(_T("FIXME: process event!\n"));

							mediaEvent->Release();
						}

						iunk->Release();
					}
				}
			}

			mftDataBuffer.pEvents->Release();	// the caller is responsible for releasing the event collection
			mftDataBuffer.pEvents = NULL;
		}

		// Process sample:
		if (mftDataBuffer.pSample != NULL)
		{
			IMFMediaBuffer* mediaBuffer;
			hr = mftDataBuffer.pSample->GetBufferByIndex(0, &mediaBuffer);
			if (FAILED(hr))
				goto bail;

			EnterCriticalSection(&m_criticalSection);

				if (m_previewWindow != NULL && m_previewConfigured)
					m_previewWindow->DrawFrame(mediaBuffer);

			LeaveCriticalSection(&m_criticalSection);

			mediaBuffer->Release();
		}
	} while(moreOutput);

	ret = true;

bail:
	if (ret == false)
		OutputDebugString(_T("ERROR: failed to process output...\n"));

	return ret;
}
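HandleStreamChange() isn't shown in this listing. A typical (assumed) implementation renegotiates the output type after MF_E_TRANSFORM_STREAM_CHANGE roughly like this:

	// Sketch under assumptions; the original HandleStreamChange() may differ.
	IMFMediaType *newType = NULL;
	if (SUCCEEDED(m_h264Decoder->GetOutputAvailableType(0, 0, &newType)))
	{
		m_h264Decoder->SetOutputType(0, newType, 0);
		newType->Release();
	}
	// The required buffer size may have changed, so the cached output
	// sample would then be rebuilt, e.g. via CreateOutputSample().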
// Process the incoming NAL from the queue: wraps it up into an
// IMFMediaSample and sends it to the decoder.
//
// Thread context: decoder thread
bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket* nalPacket)
{
	bool				ret = false;
	HRESULT				hr;
	IMFMediaBuffer*		newBuffer = NULL;
	BYTE*				newBufferPtr;
	void*				nalPacketPtr;
	//
	IMFSample*			newSample = NULL;
	ULONGLONG			nalPresentationTime;
	const BYTE			nalPrefix[] = {0, 0, 0, 1};

	// Get a pointer to the NAL data
	hr = nalPacket->GetBytes(&nalPacketPtr);
	if (FAILED(hr))
		goto bail;

	// Create the MF media buffer (+4 bytes for the NAL prefix
	// (0x00 0x00 0x00 0x01) which MF requires).
	hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize()+4, &newBuffer);
	if (FAILED(hr))
		goto bail;

	// Lock the MF media buffer
	hr = newBuffer->Lock(&newBufferPtr, NULL, NULL);
	if (FAILED(hr))
		goto bail;

	// Copy the prefix and the data
	memcpy(newBufferPtr, nalPrefix, 4);
	memcpy(newBufferPtr+4, nalPacketPtr, nalPacket->GetPayloadSize());

	// Unlock the MF media buffer
	hr = newBuffer->Unlock();
	if (FAILED(hr))
		goto bail;

	// Update the current length of the MF media buffer
	hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize()+4);
	if (FAILED(hr))
		goto bail;

	// We now have a IMFMediaBuffer with the contents of the NAL
	// packet. We now construct a IMFSample with the buffer
	hr = MFCreateSample(&newSample);
	if (FAILED(hr))
		goto bail;

	hr = newSample->AddBuffer(newBuffer);
	if (FAILED(hr))
		goto bail;

	// Get the presentation (display) time in 100-nanosecond units
	// TODO: this is pretty meaningless without setting the start time.
	hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Set presentation time on the sample
	hr = newSample->SetSampleTime(nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Now pass it to the decoder
	for (;;)
	{
		hr = m_h264Decoder->ProcessInput(0, newSample, 0);
		if (hr == S_OK)
			break;
		if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false)
			goto bail;
	}

	ret = true;

bail:
	if (newBuffer != NULL)
		newBuffer->Release();

	if (newSample != NULL)
		newSample->Release();

	return ret;
}
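For context (a sketch under assumptions, not from the original source): ProcessInput only accepts NAL data once the decoder's input type has been configured, along these lines:

	IMFMediaType *inputType = NULL;
	if (SUCCEEDED(MFCreateMediaType(&inputType)))
	{
		inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		inputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
		// Frame size/rate attributes can be set here as well.
		m_h264Decoder->SetInputType(0, inputType, 0);
		inputType->Release();
	}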
Example #7
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

	String url=PathToFilePath( path );
	
	DXASS( MFStartup( MF_VERSION ) );
	
	IMFAttributes *attrs;
	DXASS( MFCreateAttributes( &attrs,1 ) );
	DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );
	
	IMFSourceReader *reader;
	DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
	
	attrs->Release();

	IMFMediaType *mediaType;
	DXASS( MFCreateMediaType( &mediaType ) );
	DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
	DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );

	DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
    
	mediaType->Release();

	IMFMediaType *outputMediaType;
	DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );
	
	WAVEFORMATEX *wformat;
	uint32 formatByteCount=0;
	DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

	*channels=wformat->nChannels;
	*format=wformat->wBitsPerSample/8;	// bytes per sample
	*hertz=wformat->nSamplesPerSec;

	CoTaskMemFree( wformat );
    
	outputMediaType->Release();
/*    
	PROPVARIANT var;
	DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
	LONGLONG duration=var.uhVal.QuadPart;
	float64 durationInSeconds=(duration / (float64)(10000 * 1000));
	m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
*/
	std::vector<unsigned char*> bufs;
	std::vector<uint32> lens;
	uint32 len=0;
    
	for( ;; ){
		uint32 flags=0;
		IMFSample *sample;
		DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );
		
		if( flags & MF_SOURCE_READERF_ENDOFSTREAM ){
			break;
		}
		if( sample==0 ){ 
			abort();
		}
		
		IMFMediaBuffer *mediaBuffer;
		DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

		uint8 *audioData=0;
		uint32 sampleBufferLength=0;
		DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );
		
		unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
		memcpy( buf,audioData,sampleBufferLength );
		
		bufs.push_back( buf );
		lens.push_back( sampleBufferLength );
		len+=sampleBufferLength;
		
		DXASS( mediaBuffer->Unlock() );
		mediaBuffer->Release();
		
		sample->Release();
	}
	
	reader->Release();
	
	*length=len/(*channels * *format);	// length in sample frames, not bytes

	unsigned char *data=(unsigned char*)malloc( len );
	unsigned char *p=data;
	
	for( size_t i=0;i<bufs.size();++i ){
		memcpy( p,bufs[i],lens[i] );
		free( bufs[i] );
		p+=lens[i];
	}
	
	gc_force_sweep=true;
	
	return data;
}	
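A hypothetical call site (names assumed): LoadAudioData returns raw PCM allocated with malloc(); *length is in sample frames, so the byte size of the buffer is length * channels * format.

	int length=0,channels=0,format=0,hertz=0;
	unsigned char *data=game->LoadAudioData( "audio.wav",&length,&channels,&format,&hertz );
	int bytes=length*channels*format;
	// ... feed the PCM to the mixer ...
	free( data );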
Example #8
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime)
{
    //double TimeInSeconds = _Clock->Elapsed();

    const UINT SamplesPerSecond = 44100;
    const UINT ChannelCount = 2;
    const UINT SampleCount = FrameCount * ChannelCount;
    const UINT BitsPerSample = 16;
    const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount;
    const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns
    
    //
    // Write some data
    //
    IMFSample *spSample;
    IMFMediaBuffer *spBuffer;
    BYTE *pbBuffer = NULL;

    //
    // Create a media sample
    //

    HRESULT hr = MFCreateSample( &spSample );
    hr = spSample->SetSampleDuration( SampleDuration );
    
    //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) );
    
    //CaptureStartTime = 10,000,000 * t / f;
    //t = CaptureStartTime * f / 10,000,000
    LONGLONG FileStartCounter = _Clock->StartTime();
    LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000);
    hr = spSample->SetSampleTime( ( CaptureStartCounter - FileStartCounter ) * LONGLONG(10000000) / _Clock->TicksPerSecond() );
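    // Worked through: CaptureStartTime is in 100 ns units (hns) and the
    // clock counter runs at f = _Clock->TicksPerSecond(), so
    //   CaptureStartCounter [ticks] = CaptureStartTime [hns] * f / 1e7
    // and the sample time set above converts the tick delta back to hns:
    //   (CaptureStartCounter - FileStartCounter) * 1e7 / f.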

    //
    // Add a media buffer and fill it with the converted audio samples
    //

    hr = MFCreateMemoryBuffer( BufferLength, &spBuffer );
    hr = spBuffer->SetCurrentLength( BufferLength );
    hr = spSample->AddBuffer( spBuffer );

    hr = spBuffer->Lock( &pbBuffer, NULL, NULL );
    __int16 *OutputAudioBuffer = (__int16 *)pbBuffer;
    for(UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++)
    {
        //
        // Floats are in the range -1 to 1; scale by 32767 so that +1.0
        // doesn't overflow a signed 16-bit sample.
        //
        OutputAudioBuffer[SampleIndex] = __int16(Samples[SampleIndex] * 32767.0f);
    }
    hr = spBuffer->Unlock();

    //
    // Write the media sample
    //
    hr = _Writer->WriteSample( 1, spSample );
    PersistentAssert(SUCCEEDED(hr), "WriteSample failed");

    spSample->Release();
    spBuffer->Release();
}
STDMETHODIMP MFTransform::ProcessOutput(DWORD dwFlags, DWORD cOutputBufferCount, MFT_OUTPUT_DATA_BUFFER *pOutputSamples, DWORD *pdwStatus)
{
    if (dwFlags != 0)
        return E_INVALIDARG;

    if (pOutputSamples == NULL || pdwStatus == NULL)
        return E_POINTER;

    if (cOutputBufferCount != 1)
        return E_INVALIDARG;

    QMutexLocker locker(&m_mutex);

    if (!m_sample)
        return MF_E_TRANSFORM_NEED_MORE_INPUT;

    IMFMediaBuffer *input = NULL;
    IMFMediaBuffer *output = NULL;

    DWORD sampleLength = 0;
    m_sample->GetTotalLength(&sampleLength);

    // If the sample length is zero, it means we're getting DXVA buffers.
    // In that case just pass on the sample we got as input.
    // Otherwise we need to copy the input buffer into the buffer the sink
    // is giving us.
    if (pOutputSamples[0].pSample && sampleLength > 0) {

        if (FAILED(m_sample->ConvertToContiguousBuffer(&input)))
            goto done;

        if (FAILED(pOutputSamples[0].pSample->ConvertToContiguousBuffer(&output)))
            goto done;

        DWORD inputLength = 0;
        DWORD outputLength = 0;
        input->GetMaxLength(&inputLength);
        output->GetMaxLength(&outputLength);

        if (outputLength < inputLength) {
            pOutputSamples[0].pSample->RemoveAllBuffers();
            output->Release();
            output = NULL;
            if (SUCCEEDED(MFCreateMemoryBuffer(inputLength, &output)))
                pOutputSamples[0].pSample->AddBuffer(output);
        }

        if (output)
            m_sample->CopyToBuffer(output);

        LONGLONG hnsDuration = 0;
        LONGLONG hnsTime = 0;
        if (SUCCEEDED(m_sample->GetSampleDuration(&hnsDuration)))
            pOutputSamples[0].pSample->SetSampleDuration(hnsDuration);
        if (SUCCEEDED(m_sample->GetSampleTime(&hnsTime)))
            pOutputSamples[0].pSample->SetSampleTime(hnsTime);


    } else {
        if (pOutputSamples[0].pSample)
            pOutputSamples[0].pSample->Release();
        pOutputSamples[0].pSample = m_sample;
        pOutputSamples[0].pSample->AddRef();
    }

done:
    pOutputSamples[0].dwStatus = 0;
    *pdwStatus = 0;

    m_sample->Release();
    m_sample = 0;

    if (input)
        input->Release();
    if (output)
        output->Release();

    return S_OK;
}