Example #1
HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
	//printf("Get Sample...\n");

	IMFSample *videoSample = NULL;

	// Initial read results in a null pSample??
	CHECK_HR(videoReader->ReadSample(
		MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index. 
		&flags,                         // Receives status flags.
		&llVideoTimeStamp,                   // Receives the time stamp.
		&videoSample                        // Receives the sample or NULL.
		), L"Error reading video sample.");

	if (!videoSample)
	{
		printf("Failed to get video sample from MF.\n");
		return E_FAIL;
	}
	else
	{
		DWORD nCurrBufferCount = 0;
		CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

		IMFMediaBuffer * pMediaBuffer;
		CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

		DWORD nCurrLen = 0;
		CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

		byte *imgBuff;
		DWORD buffCurrLen = 0;
		DWORD buffMaxLen = 0;
		pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen);
		
		/*BYTE *i420 = new BYTE[4608000];
		YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
		vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/
		
		vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);
		
		const vpx_codec_cx_pkt_t * pkt;
		vpx_enc_frame_flags_t flags = 0;
		
		if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, flags, VPX_DL_REALTIME)) {
			printf("VPX codec failed to encode the frame.\n");
			return E_FAIL;
		}
		else {
			vpx_codec_iter_t iter = NULL;

			while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter))) {
				switch (pkt->kind) {
				case VPX_CODEC_CX_FRAME_PKT:                                
					vpkt = pkt; // const_cast<vpx_codec_cx_pkt_t **>(&pkt);
					break;
				default:
					break;
				}

				printf("%s %i\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
			}
		}

		_sampleCount++;

		vpx_img_free(img);

		pMediaBuffer->Unlock();
		pMediaBuffer->Release();

		//delete i420;

		videoSample->Release();

		return S_OK;
	}
}
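The samples above lean on helpers that are not part of Media Foundation itself. A minimal sketch of what Example #1 appears to assume: the canonical SafeRelease template, plus a CHECK_HR macro that logs the message and bails out of the calling function on failure (the exact definitions in the original project may differ).

template <class T> void SafeRelease(T **ppT)
{
	if (*ppT)
	{
		(*ppT)->Release();
		*ppT = NULL;
	}
}

#define CHECK_HR(expr, msg) \
	{ HRESULT hr_ = (expr); if (FAILED(hr_)) { wprintf(L"%s (hr=0x%08X)\n", msg, (unsigned)hr_); return hr_; } }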
Example #2
//-------------------------------------------------------------------
// Write the sample 
//
HRESULT VidWriter::writeFrame(BYTE *pData)
{
	HRESULT hr;
    IMFSample *pSample = NULL;
    const DWORD cbBuffer = 4 * m_width * m_height;

	// Unlock the buffer; bail out if it was never created
    if (!m_pBuffer) return E_POINTER;
    m_pBuffer->Unlock();

	// Set the data length of the buffer
    hr = m_pBuffer->SetCurrentLength(cbBuffer);
	if (FAILED(hr)) goto done;

    // Create a media sample and add the buffer to it
    hr = MFCreateSample(&pSample);
	if (FAILED(hr)) goto done;
	hr = pSample->AddBuffer(m_pBuffer);
	if (FAILED(hr)) goto done;

    // Set the time stamp and the duration
    hr = pSample->SetSampleTime(m_rtStart);
	if (FAILED(hr)) goto done;
	hr = pSample->SetSampleDuration(m_frametime);
	if (FAILED(hr)) goto done;

	// increment the time stamp
	m_rtStart += m_frametime;

    // Send the sample to the Sink Writer
    hr = m_pWriter->WriteSample(m_streamIndex, pSample);

done:
    SafeRelease(&pSample);
    return hr;
}
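writeFrame() expects m_pBuffer to already exist and to hold the caller's pixel data under a lock. A hypothetical companion showing the per-frame cycle (renderFrame and the drawing step are illustrative, not from the original source):

HRESULT VidWriter::renderFrame()
{
	HRESULT hr = S_OK;
	BYTE *pData = NULL;
	const DWORD cbBuffer = 4 * m_width * m_height;

	// Create the reusable RGB32 frame buffer on first use
	if (!m_pBuffer)
	{
		hr = MFCreateMemoryBuffer(cbBuffer, &m_pBuffer);
		if (FAILED(hr)) return hr;
	}

	// Lock the buffer and draw into it; writeFrame() unlocks it
	// and submits it to the sink writer
	hr = m_pBuffer->Lock(&pData, NULL, NULL);
	if (FAILED(hr)) return hr;

	// ... fill pData with m_height rows of 4 * m_width bytes ...

	return writeFrame(pData);
}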
Example #3
//-------------------------------------------------------------------
// Read a frame and provide access to the data
//
HRESULT VidReader::getReadBuffer(BYTE **ppData)
{
    HRESULT     hr = S_OK;
    DWORD       dwFlags = 0;
    DWORD       cbBitmapData = 0;       // Size of data, in bytes
	IMFSample	*pSample = NULL;	// initialise so SafeRelease below is safe on early failure

	if (!m_pReader) return E_ABORT; // if no source reader run away

    while (1)
    {
        hr = m_pReader->ReadSample(
            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
            0, NULL, &dwFlags, &m_timestamp, &pSample );

        if (FAILED(hr)) goto done;

        if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM)
        {
            break;
        }

        if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
        {
            // Type change. Get the new format.
            hr = getVideoFormat();
            if (FAILED(hr)) goto done;
        }

        if (pSample == NULL)
        {
            continue;
        }

        // We got a sample.
        break;
	}

    if (pSample)
    {
        UINT32 pitch = 4 * m_imagewidth; 

        hr = pSample->ConvertToContiguousBuffer(&m_pBuffer);
        if (FAILED(hr)) goto done;

		hr = m_pBuffer->Lock(ppData, NULL, &cbBitmapData);
        if (FAILED(hr)) goto done;

        assert(cbBitmapData == (pitch * m_imageheight));
    }
    else
    {
        hr = MF_E_END_OF_STREAM;
    }

done:
	SafeRelease(&pSample);
	return hr;
}
Example #4
HRESULT D3DPresentEngine::createVideoSamples(IMFMediaType *format, QList<IMFSample*> &videoSampleQueue)
{
    if (!format)
        return MF_E_UNEXPECTED;

    HRESULT hr = S_OK;
    D3DPRESENT_PARAMETERS pp;

    IDirect3DSwapChain9 *swapChain = NULL;
    IMFSample *videoSample = NULL;

    QMutexLocker locker(&m_mutex);

    releaseResources();

    // Get the swap chain parameters from the media type.
    hr = getSwapChainPresentParameters(format, &pp);
    if (FAILED(hr))
        goto done;

    // Create the video samples.
    for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) {
        // Create a new swap chain.
        hr = m_device->CreateAdditionalSwapChain(&pp, &swapChain);
        if (FAILED(hr))
            goto done;

        // Create the video sample from the swap chain.
        hr = createD3DSample(swapChain, &videoSample);
        if (FAILED(hr))
            goto done;

        // Add it to the list.
        videoSample->AddRef();
        videoSampleQueue.append(videoSample);

        // Set the swap chain pointer as a custom attribute on the sample. This keeps
        // a reference count on the swap chain, so that the swap chain is kept alive
        // for the duration of the sample's lifetime.
        hr = videoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, swapChain);
        if (FAILED(hr))
            goto done;

        qt_wmf_safeRelease(&videoSample);
        qt_wmf_safeRelease(&swapChain);
    }

done:
    if (FAILED(hr))
        releaseResources();

    qt_wmf_safeRelease(&swapChain);
    qt_wmf_safeRelease(&videoSample);
    return hr;
}
Example #5
HRESULT
WWMFResampler::ConvertWWSampleDataToMFSample(WWMFSampleData &sampleData, IMFSample **ppSample)
{
    HRESULT hr = S_OK;
    IMFSample *pSample = NULL;
#ifdef USE_ATL
    CComPtr<IMFMediaBuffer> spBuffer;
#else // USE_ATL
    IMFMediaBuffer *spBuffer = NULL;
#endif // USE_ATL
    BYTE  *pByteBufferTo = NULL;
    //LONGLONG hnsSampleDuration;
    //LONGLONG hnsSampleTime;
    int frameCount;

    assert(ppSample);
    *ppSample = NULL;

    HRG(MFCreateMemoryBuffer(sampleData.bytes, &spBuffer));
    HRG(spBuffer->Lock(&pByteBufferTo, NULL, NULL));

    memcpy(pByteBufferTo, sampleData.data, sampleData.bytes);

    pByteBufferTo = NULL;
    HRG(spBuffer->Unlock());
    HRG(spBuffer->SetCurrentLength(sampleData.bytes));

    HRG(MFCreateSample(&pSample));
    HRG(pSample->AddBuffer(spBuffer));

    frameCount = sampleData.bytes / m_inputFormat.FrameBytes();
    /*
    hnsSampleDuration = (LONGLONG)(10.0 * 1000 * 1000 * frameCount        / m_inputFormat.sampleRate);
    hnsSampleTime     = (LONGLONG)(10.0 * 1000 * 1000 * m_inputFrameTotal / m_inputFormat.sampleRate);
    HRG(pSample->SetSampleDuration(hnsSampleDuration));
    HRG(pSample->SetSampleTime(hnsSampleTime));
    */

    m_inputFrameTotal += frameCount;

    // succeeded.

    *ppSample = pSample;
    pSample = NULL; //< prevent release

end:
    SafeRelease(&pSample);
#ifndef USE_ATL
    SafeRelease(&spBuffer);
#endif // USE_ATL
    return hr;
}
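HRG is this project's goto-on-failure helper. Judging from the end: label and the hr local above, a plausible definition is:

#define HRG(expr)                  \
	do {                           \
		hr = (expr);               \
		if (FAILED(hr)) {          \
			goto end;              \
		}                          \
	} while (0)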
Example #6
HRESULT VideoEncoder::WriteTransitionSample(UINT64 sampleDuration, TransitionBase* pTransition, DWORD streamIndex, LONGLONG* startTime)
{
	HRESULT hr = S_OK;
	IMFMediaBuffer* pMediaBuffer = nullptr;
	BYTE* pFrameBuffer = nullptr;
	IMFSample* pSample = nullptr;
	BYTE* pOutputFrame = nullptr;

	for (DWORD i = 0; i < sampleDuration; i++)
    {
		CheckHR(MFCreateMemoryBuffer(this->m_frameBufferSize, &pMediaBuffer));
		pMediaBuffer->Lock(&pFrameBuffer, nullptr, nullptr);
		float time = (float)i / (float)sampleDuration;
		pOutputFrame = pTransition->GetOutputFrame(time);
		CheckHR(MFCopyImage(pFrameBuffer, this->m_frameStride, pOutputFrame, this->m_frameStride, this->m_frameStride, this->m_frameHeight));
		CheckHR(pMediaBuffer->Unlock());
		CheckHR(pMediaBuffer->SetCurrentLength(this->m_frameBufferSize));
		CheckHR(MFCreateSample(&pSample));
		CheckHR(pSample->AddBuffer(pMediaBuffer));
		CheckHR(pSample->SetSampleTime(*startTime));
		CheckHR(pSample->SetSampleDuration(this->GetFrameDuration()));
		CheckHR(this->m_pSinkWriter->WriteSample(streamIndex, pSample));
		(*startTime) += this->GetFrameDuration();

		// Release the per-iteration sample resources.
		SafeRelease(&pMediaBuffer);
		SafeRelease(&pSample);
		if (pOutputFrame != nullptr)
		{
			delete[] pOutputFrame; // assuming GetOutputFrame() allocates the frame with new[]
			pOutputFrame = nullptr;
		}
	}

cleanup:
	if (!SUCCEEDED(hr))
	{
		DWORD error = GetLastError();
		this->m_logFileStream << "Unexpected error: " << error << endl;
	}
	SafeRelease(&pMediaBuffer);
	SafeRelease(&pSample);
	if (pOutputFrame != nullptr)
	{
		delete[] pOutputFrame; // assuming GetOutputFrame() allocates the frame with new[]
		pOutputFrame = nullptr;
	}
	return hr;
}
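CheckHR here evidently jumps to the cleanup label so the sample and buffer are released on failure. A minimal sketch, assuming hr is the local HRESULT:

#define CheckHR(expr) { hr = (expr); if (FAILED(hr)) goto cleanup; }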
Example #7
void D3DPresentEngine::presentSample(void *opaque, qint64)
{
    HRESULT hr = S_OK;

    IMFSample *sample = reinterpret_cast<IMFSample*>(opaque);
    IMFMediaBuffer* buffer = NULL;
    IDirect3DSurface9* surface = NULL;

    if (m_surface && m_surface->isActive()) {
        if (sample) {
            // Get the buffer from the sample.
            hr = sample->GetBufferByIndex(0, &buffer);
            if (FAILED(hr))
                goto done;

            // Get the surface from the buffer.
            hr = MFGetService(buffer, MR_BUFFER_SERVICE, IID_PPV_ARGS(&surface));
            if (FAILED(hr))
                goto done;
        }

        if (surface && updateTexture(surface)) {
            QVideoFrame frame = QVideoFrame(new TextureVideoBuffer(m_glTexture),
                                            m_surfaceFormat.frameSize(),
                                            m_surfaceFormat.pixelFormat());

            // WMF uses 100-nanosecond units, Qt uses microseconds
            LONGLONG startTime = -1;
            if (SUCCEEDED(sample->GetSampleTime(&startTime))) {
                frame.setStartTime(startTime * 0.1);

                LONGLONG duration = -1;
                if (SUCCEEDED(sample->GetSampleDuration(&duration)))
                    frame.setEndTime((startTime + duration) * 0.1);
            }

            m_surface->present(frame);
        }
    }

done:
    qt_wmf_safeRelease(&surface);
    qt_wmf_safeRelease(&buffer);
    qt_wmf_safeRelease(&sample);
}
Example #8
IMFSample* create_sample(void *data, DWORD len, DWORD alignment, LONGLONG duration) {
	HRESULT hr = S_OK;
	IMFMediaBuffer *buf = NULL;
	IMFSample *sample = NULL;

	hr = MFCreateSample(&sample);
	if (FAILED(hr)) {
		ReportError(L"Unable to allocate a sample", hr);
		return NULL;
	}
	// Yes, the argument for alignment is the actual alignment - 1
	hr = MFCreateAlignedMemoryBuffer(len, alignment - 1, &buf);
	if (FAILED(hr)) { 
		ReportError(L"Unable to allocate a memory buffer for sample", hr);
		SafeRelease(&sample); // don't leak the sample created above
		return NULL;
	}
	if (data)
	{
		BYTE *buffer;
		// lock the MediaBuffer
		// this is actually not a thread-safe lock
		hr = buf->Lock(&buffer, NULL, NULL);
		if (FAILED(hr))
		{
			SafeRelease(&sample);
			SafeRelease(&buf);
			return NULL;
		}

		memcpy(buffer, data, len);

		buf->SetCurrentLength(len);
		buf->Unlock();
	}

	sample->AddBuffer(buf);
	hr = sample->SetSampleDuration(duration);
	SafeRelease(&buf);
	return sample;
}
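A hypothetical usage sketch: wrap one compressed frame and feed it to a decoder MFT. The decoder, frameData, frameLen, and frameDuration variables, and the 16-byte alignment, are assumptions for illustration.

IMFSample *sample = create_sample(frameData, frameLen, 16, frameDuration);
if (sample) {
	// IMFTransform *decoder is assumed to be configured already
	HRESULT hr = decoder->ProcessInput(0, sample, 0);
	if (hr == MF_E_NOTACCEPTING) {
		// drain the decoder's output first, then resubmit the sample
	}
	sample->Release();
}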
Example #9
int camera_capture(camera_t *cam, unsigned char *outdata)
{
	camera_internal_t *camera = (camera_internal_t*)cam;
	if (!camera->reader)
		return 1; // error should be zero...

	HRESULT hr = S_OK;

	IMFSample *pSample = NULL;
	DWORD streamIndex = 0, flags = 0;
	LONGLONG llTimeStamp = 0;

	// skip one
	hr = camera->reader->ReadSample(
		(DWORD)MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index.
		&flags,                         // Receives status flags.
		&llTimeStamp,                   // Receives the time stamp.
		&pSample                        // Receives the sample or NULL.
		);
	SafeRelease(&pSample);

	hr = camera->reader->ReadSample(
		(DWORD)MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index.
		&flags,                         // Receives status flags.
		&llTimeStamp,                   // Receives the time stamp.
		&pSample                        // Receives the sample or NULL.
		);

	IMFMediaBuffer *mediaBuffer = NULL;
	BYTE *pData = NULL;
	DWORD pLength = 0;
	if (!pSample) // ReadSample can legitimately deliver no sample
		return 1; // matches the error convention used above
	pSample->ConvertToContiguousBuffer(&mediaBuffer);
	hr = mediaBuffer->Lock(&pData, NULL, &pLength);
	//console_printf("camera: length: %d, WxH: %d, 2WxH: %d, 3WxH: %d\n", pLength, WIDTH * HEIGHT, 2 * WIDTH * HEIGHT, 3 * WIDTH * HEIGHT);
	/*unsigned char *rgbData = new unsigned char[WIDTH * HEIGHT * (24 / 8)];
	unsigned char *yuy2Ptr = pData, *rgbPtr = rgbData;
	for (UINT32 j = 0; j < HEIGHT; j++)
	{
		for (UINT32 i = 0; i < WIDTH / 2; ++i)
		{
			int y0 = yuy2Ptr[0];
			int u0 = yuy2Ptr[1];
			int y1 = yuy2Ptr[2];
			int v0 = yuy2Ptr[3];
			yuy2Ptr += 4;
			int c = y0 - 16;
			int d = u0 - 128;
			int e = v0 - 128;
			rgbPtr[0] = clip((298 * c + 516 * d + 128) >> 8); // blue
			rgbPtr[1] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
			rgbPtr[2] = clip((298 * c + 409 * e + 128) >> 8); // red
			c = y1 - 16;
			rgbPtr[3] = clip((298 * c + 516 * d + 128) >> 8); // blue
			rgbPtr[4] = clip((298 * c - 100 * d - 208 * e + 128) >> 8); // green
			rgbPtr[5] = clip((298 * c + 409 * e + 128) >> 8); // red
			rgbPtr += 6;
		}
	}*/

	unsigned char *in = pData + camera->size.width * (camera->size.height - 1) * 3, *out = outdata;
	for (UINT32 j = 0; j < camera->size.height; j++)
	{
		for (UINT32 i = 0; i < camera->size.width; ++i)
		{
			out[2] = in[0];
			out[1] = in[1];
			out[0] = in[2];
			in += 3; out += 3;
		}
		in -= 2 * camera->size.width * 3;
	}

	mediaBuffer->Unlock();
	SafeRelease(&pSample);
	SafeRelease(&mediaBuffer);
	return 1;
}
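The commented-out YUY2-to-RGB block above references a clip() helper that is not shown; the usual definition clamps the fixed-point result to the 0-255 byte range:

static inline unsigned char clip(int value)
{
	return (unsigned char)(value < 0 ? 0 : (value > 255 ? 255 : value));
}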
Example #10
SINT SoundSourceMediaFoundation::readSampleFrames(
        SINT numberOfFrames, CSAMPLE* sampleBuffer) {

    SINT numberOfFramesRemaining = numberOfFrames;
    CSAMPLE* pSampleBuffer = sampleBuffer;

    while (numberOfFramesRemaining > 0) {
        SampleBuffer::ReadableChunk readableChunk(
                m_sampleBuffer.readFromHead(
                        frames2samples(numberOfFramesRemaining)));
        DEBUG_ASSERT(readableChunk.size()
                <= frames2samples(numberOfFramesRemaining));
        if (readableChunk.size() > 0) {
            DEBUG_ASSERT(m_currentFrameIndex < getMaxFrameIndex());
            if (sampleBuffer != nullptr) {
                SampleUtil::copy(
                        pSampleBuffer,
                        readableChunk.data(),
                        readableChunk.size());
                pSampleBuffer += readableChunk.size();
            }
            m_currentFrameIndex += samples2frames(readableChunk.size());
            numberOfFramesRemaining -= samples2frames(readableChunk.size());
        }
        if (numberOfFramesRemaining == 0) {
            break; // finished reading
        }

        // No more decoded sample frames available
        DEBUG_ASSERT(m_sampleBuffer.isEmpty());

        if (m_pSourceReader == nullptr) {
            break; // abort if reader is dead
        }

        DWORD dwFlags = 0;
        LONGLONG streamPos = 0;
        IMFSample* pSample = nullptr;
        HRESULT hrReadSample =
                m_pSourceReader->ReadSample(
                        kStreamIndex, // [in]  DWORD dwStreamIndex,
                        0,            // [in]  DWORD dwControlFlags,
                        nullptr,      // [out] DWORD *pdwActualStreamIndex,
                        &dwFlags,     // [out] DWORD *pdwStreamFlags,
                        &streamPos,   // [out] LONGLONG *pllTimestamp,
                        &pSample);    // [out] IMFSample **ppSample
        if (FAILED(hrReadSample)) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample() failed"
                    << hrReadSample
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        if (dwFlags & MF_SOURCE_READERF_ERROR) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected stream errors"
                    << "(MF_SOURCE_READERF_ERROR)"
                    << "-> abort and stop decoding";
            DEBUG_ASSERT(pSample == nullptr);
            safeRelease(&m_pSourceReader); // kill the reader
            break; // abort
        } else if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
            DEBUG_ASSERT(pSample == nullptr);
            break; // finished reading
        } else if (dwFlags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) {
            qWarning() << kLogPreamble
                    << "IMFSourceReader::ReadSample()"
                    << "detected that the media type has changed"
                    << "(MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)"
                    << "-> abort decoding";
            DEBUG_ASSERT(pSample == nullptr);
            break; // abort
        }
        DEBUG_ASSERT(pSample != nullptr);
        SINT readerFrameIndex = m_streamUnitConverter.toFrameIndex(streamPos);
        DEBUG_ASSERT(
                (m_currentFrameIndex == getMaxFrameIndex()) || // unknown position after seeking
                (m_currentFrameIndex == readerFrameIndex));
        m_currentFrameIndex = readerFrameIndex;

        DWORD dwSampleBufferCount = 0;
        HRESULT hrGetBufferCount =
                pSample->GetBufferCount(&dwSampleBufferCount);
        if (FAILED(hrGetBufferCount)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetBufferCount() failed"
                    << hrGetBufferCount
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }

        DWORD dwSampleTotalLengthInBytes = 0;
        HRESULT hrGetTotalLength = pSample->GetTotalLength(&dwSampleTotalLengthInBytes);
        if (FAILED(hrGetTotalLength)) {
            qWarning() << kLogPreamble
                    << "IMFSample::GetTotalLength() failed"
                    << hrGetTotalLength
                    << "-> abort decoding";
            safeRelease(&pSample);
            break; // abort
        }
        // Enlarge temporary buffer (if necessary)
        DEBUG_ASSERT((dwSampleTotalLengthInBytes % kBytesPerSample) == 0);
        SINT numberOfSamplesToBuffer =
            dwSampleTotalLengthInBytes / kBytesPerSample;
        SINT sampleBufferCapacity = m_sampleBuffer.getCapacity();
        DEBUG_ASSERT(sampleBufferCapacity > 0);
        while (sampleBufferCapacity < numberOfSamplesToBuffer) {
            sampleBufferCapacity *= 2;
        }
        if (m_sampleBuffer.getCapacity() < sampleBufferCapacity) {
            qDebug() << kLogPreamble
                    << "Enlarging sample buffer capacity"
                    << m_sampleBuffer.getCapacity()
                    << "->"
                    << sampleBufferCapacity;
            m_sampleBuffer.resetCapacity(sampleBufferCapacity);
        }

        DWORD dwSampleBufferIndex = 0;
        while (dwSampleBufferIndex < dwSampleBufferCount) {
            IMFMediaBuffer* pMediaBuffer = nullptr;
            HRESULT hrGetBufferByIndex = pSample->GetBufferByIndex(dwSampleBufferIndex, &pMediaBuffer);
            if (FAILED(hrGetBufferByIndex)) {
                qWarning() << kLogPreamble
                        << "IMFSample::GetBufferByIndex() failed"
                        << hrGetBufferByIndex
                        << "-> abort decoding";
                DEBUG_ASSERT(pMediaBuffer == nullptr);
                break; // prematurely exit buffer loop
            }

            CSAMPLE* pLockedSampleBuffer = nullptr;
            DWORD lockedSampleBufferLengthInBytes = 0;
            HRESULT hrLock = pMediaBuffer->Lock(
                    reinterpret_cast<quint8**>(&pLockedSampleBuffer),
                    nullptr,
                    &lockedSampleBufferLengthInBytes);
            if (FAILED(hrLock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Lock() failed"
                        << hrLock
                        << "-> abort decoding";
                safeRelease(&pMediaBuffer);
                break; // prematurely exit buffer loop
            }

            DEBUG_ASSERT((lockedSampleBufferLengthInBytes % sizeof(pLockedSampleBuffer[0])) == 0);
            SINT lockedSampleBufferCount =
                    lockedSampleBufferLengthInBytes / sizeof(pLockedSampleBuffer[0]);
            SINT copySamplesCount = std::min(
                    frames2samples(numberOfFramesRemaining),
                    lockedSampleBufferCount);
            if (copySamplesCount > 0) {
                // Copy samples directly into output buffer if possible
                if (pSampleBuffer != nullptr) {
                    SampleUtil::copy(
                            pSampleBuffer,
                            pLockedSampleBuffer,
                            copySamplesCount);
                    pSampleBuffer += copySamplesCount;
                }
                pLockedSampleBuffer += copySamplesCount;
                lockedSampleBufferCount -= copySamplesCount;
                m_currentFrameIndex += samples2frames(copySamplesCount);
                numberOfFramesRemaining -= samples2frames(copySamplesCount);
            }
            // Buffer the remaining samples
            SampleBuffer::WritableChunk writableChunk(
                    m_sampleBuffer.writeToTail(lockedSampleBufferCount));
            // The required capacity has been calculated in advance (see above)
            DEBUG_ASSERT(writableChunk.size() == lockedSampleBufferCount);
            SampleUtil::copy(
                    writableChunk.data(),
                    pLockedSampleBuffer,
                    writableChunk.size());
            HRESULT hrUnlock = pMediaBuffer->Unlock();
            VERIFY_OR_DEBUG_ASSERT(SUCCEEDED(hrUnlock)) {
                qWarning() << kLogPreamble
                        << "IMFMediaBuffer::Unlock() failed"
                        << hrUnlock;
                // ignore and continue
            }
            safeRelease(&pMediaBuffer);
            ++dwSampleBufferIndex;
        }
        safeRelease(&pSample);
        if (dwSampleBufferIndex < dwSampleBufferCount) {
            // Failed to read data from all buffers -> kill the reader
            qWarning() << kLogPreamble
                    << "Failed to read all buffered samples"
                    << "-> abort and stop decoding";
            safeRelease(&m_pSourceReader);
            break; // abort
        }
    }

    return numberOfFrames - numberOfFramesRemaining;
}
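frames2samples() and samples2frames() are not shown; for interleaved audio they are simply multiplication and division by the channel count. A sketch under that assumption (kChannelCount is an illustrative name):

static SINT frames2samples(SINT frames) { return frames * kChannelCount; }
static SINT samples2frames(SINT samples) { return samples / kChannelCount; }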
Example #11
HRESULT WavStream::CreateAudioSample(IMFSample **ppSample)
{
    HRESULT hr = S_OK;

    IMFMediaBuffer *pBuffer = NULL;
    IMFSample *pSample = NULL;

    DWORD       cbBuffer = 0;
    BYTE        *pData = NULL;
    LONGLONG    duration = 0;

    // Start with one second of data, rounded up to the nearest block.
    cbBuffer = AlignUp<DWORD>(m_pRiff->Format()->nAvgBytesPerSec, m_pRiff->Format()->nBlockAlign);

    // Don't request any more than what's left.
    cbBuffer = min(cbBuffer, m_pRiff->BytesRemainingInChunk());

    // Create the buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Get a pointer to the buffer memory.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }

    // Fill the buffer
    if (SUCCEEDED(hr))
    {   
        hr = m_pRiff->ReadDataFromChunk(pData, cbBuffer);
    }

    // Unlock the buffer.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->Unlock();
        pData = NULL;
    }

    // Set the size of the valid data in the buffer.
    if (SUCCEEDED(hr))
    {   
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }

    // Create a new sample and add the buffer to it.
    if (SUCCEEDED(hr))
    {   
        hr = MFCreateSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {   
        hr = pSample->AddBuffer(pBuffer);
    }

    // Set the time stamps, duration, and sample flags.
    if (SUCCEEDED(hr))
    {   
        hr = pSample->SetSampleTime(m_rtCurrentPosition);
    }

    if (SUCCEEDED(hr))
    {   
        duration = AudioDurationFromBufferSize(m_pRiff->Format(), cbBuffer);
        hr = pSample->SetSampleDuration(duration);
    }

    // Set the discontinuity flag.
    if (SUCCEEDED(hr))
    {   
        if (m_discontinuity)
        {
            hr = pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
        }
    }

    if (SUCCEEDED(hr))
    {   
        // Update our current position.
        m_rtCurrentPosition += duration;

        // Give the pointer to the caller.
        *ppSample = pSample;
        (*ppSample)->AddRef();
    }

    if (pData && pBuffer)
    {
        hr = pBuffer->Unlock();
    }

    SafeRelease(&pBuffer);
    SafeRelease(&pSample);
    return hr;
}
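AudioDurationFromBufferSize() converts a byte count into a 100-nanosecond duration via the format's average byte rate; a sketch matching how it is used above:

LONGLONG AudioDurationFromBufferSize(const WAVEFORMATEX *pWav, DWORD cbAudioDataSize)
{
	if (pWav->nAvgBytesPerSec == 0)
	{
		return 0;
	}
	return (LONGLONG)cbAudioDataSize * 10000000 / pWav->nAvgBytesPerSec;
}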
Example #12
HRESULT WavStream::RequestSample(IUnknown* pToken)
{
    if (m_pSource == NULL)
    {
        return E_UNEXPECTED;
    }

    HRESULT hr = S_OK;

    IMFMediaSource *pSource = NULL;
    IMFSample *pSample = NULL;  // Sample to deliver.

    EnterCriticalSection(&m_critSec);

    // Check if we are shut down.
    hr = CheckShutdown();

    // Check if we already reached the end of the stream.
    if (SUCCEEDED(hr))
    {   
        if (m_EOS)
        {
            hr = MF_E_END_OF_STREAM;
        }
    }

    // Check the source is stopped.
    // GetState does not hold the source's critical section. Safe to call.
    if (SUCCEEDED(hr))
    {   
        if (m_pSource->GetState() == WavSource::STATE_STOPPED)
        {
            hr = MF_E_INVALIDREQUEST;
        }
    }

    if (SUCCEEDED(hr))
    {   
        // Create a new audio sample.
        hr = CreateAudioSample(&pSample);
    }

    if (SUCCEEDED(hr))
    {   
        // If the caller provided a token, attach it to the sample as
        // an attribute. 

        // NOTE: If we processed sample requests asynchronously, we would
        // need to call AddRef on the token and put the token onto a FIFO
        // queue. See documentation for IMFMediaStream::RequestSample.
        if (pToken)
        {
            hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
        }
    }

    // If paused, queue the sample for later delivery. Otherwise, deliver the sample now.
    if (SUCCEEDED(hr))
    {   
        if (m_pSource->GetState() == WavSource::STATE_PAUSED)
        {
            hr = m_sampleQueue.Queue(pSample);
        }
        else
        {
            hr = DeliverSample(pSample);
        }
    }

    // Cache a pointer to the source, prior to leaving the critical section.
    if (SUCCEEDED(hr))
    {   
        pSource = m_pSource;
        pSource->AddRef();
    }

    LeaveCriticalSection(&m_critSec);


    // We only have one stream, so the end of the stream is also the end of the
    // presentation. Therefore, when we reach the end of the stream, we need to 
    // queue the end-of-presentation event from the source. Logically we would do 
    // this inside the CheckEndOfStream method. However, we cannot hold the
    // source's critical section while holding the stream's critical section, at
    // risk of deadlock. 

    if (SUCCEEDED(hr))
    {   
        if (m_EOS)
        {
            hr = pSource->QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, NULL);
        }
    }

    SafeRelease(&pSample);
    SafeRelease(&pSource);
    return hr;
}
Example #13
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

	String url=PathToFilePath( path );
	
	DXASS( MFStartup( MF_VERSION ) );
	
	IMFAttributes *attrs;
	DXASS( MFCreateAttributes( &attrs,1 ) );
	DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );
	
	IMFSourceReader *reader;
	DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
	
	attrs->Release();

	IMFMediaType *mediaType;
	DXASS( MFCreateMediaType( &mediaType ) );
	DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
	DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );

	DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
    
	mediaType->Release();

	IMFMediaType *outputMediaType;
	DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );
	
	WAVEFORMATEX *wformat;
	uint32 formatByteCount=0;
	DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

	*channels=wformat->nChannels;
	*format=wformat->wBitsPerSample/8;
	*hertz=wformat->nSamplesPerSec;

	CoTaskMemFree( wformat );
    
	outputMediaType->Release();
/*    
	PROPVARIANT var;
	DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
	LONGLONG duration=var.uhVal.QuadPart;
	float64 durationInSeconds=(duration / (float64)(10000 * 1000));
	m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
*/
	std::vector<unsigned char*> bufs;
	std::vector<uint32> lens;
	uint32 len=0;
    
	for( ;; ){
		uint32 flags=0;
		IMFSample *sample;
		DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );
		
		if( flags & MF_SOURCE_READERF_ENDOFSTREAM ){
			break;
		}
		if( sample==0 ){ 
			abort();
		}
		
		IMFMediaBuffer *mediaBuffer;
		DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

		uint8 *audioData=0;
		uint32 sampleBufferLength=0;
		DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );
		
		unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
		memcpy( buf,audioData,sampleBufferLength );
		
		bufs.push_back( buf );
		lens.push_back( sampleBufferLength );
		len+=sampleBufferLength;
		
		DXASS( mediaBuffer->Unlock() );
		mediaBuffer->Release();
		
		sample->Release();
	}
	
	reader->Release();
	
	*length=len/(*channels * *format);

	unsigned char *data=(unsigned char*)malloc( len );
	unsigned char *p=data;
	
	for( int i=0;i<bufs.size();++i ){
		memcpy( p,bufs[i],lens[i] );
		free( bufs[i] );
		p+=lens[i];
	}
	
	gc_force_sweep=true;
	
	return data;
}	
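DXASS fails hard on any error, consistent with the abort() above when a sample is missing. A minimal sketch (the real macro may log before exiting):

#define DXASS(expr) { HRESULT hr_ = (expr); if (FAILED(hr_)) abort(); }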
Example #14
HRESULT PpboxStream::DispatchSamples()
{
    HRESULT hr = S_OK;
    BOOL bNeedData = FALSE;
    BOOL bEOS = FALSE;

    SourceLock lock(m_pSource);

    // An I/O request can complete after the source is paused, stopped, or
    // shut down. Do not deliver samples unless the source is running.
    if (m_state != STATE_STARTED)
    {
        return S_OK;
    }

    IMFSample *pSample = NULL;
    IUnknown  *pToken = NULL;

    // Deliver as many samples as we can.
    while (!m_Samples.IsEmpty() && !m_Requests.IsEmpty())
    {
        // Pull the next sample from the queue.
        hr = m_Samples.RemoveFront(&pSample);
        if (FAILED(hr))
        {
            goto done;
        }

        // Pull the next request token from the queue. Tokens can be NULL.
        hr = m_Requests.RemoveFront(&pToken);
        if (FAILED(hr))
        {
            goto done;
        }

        if (pToken)
        {
            // Set the token on the sample.
            hr = pSample->SetUnknown(MFSampleExtension_Token, pToken);
            if (FAILED(hr))
            {
                goto done;
            }
        }

        // Send an MEMediaSample event with the sample.
        hr = m_pEventQueue->QueueEventParamUnk(
            MEMediaSample, GUID_NULL, S_OK, pSample);

        if (FAILED(hr))
        {
            goto done;
        }

        SafeRelease(&pSample);
        SafeRelease(&pToken);
    }

    if (m_Samples.IsEmpty() && m_bEOS)
    {
        // The sample queue is empty AND we have reached the end of the source
        // stream. Notify the pipeline by sending the end-of-stream event.

        hr = m_pEventQueue->QueueEventParamVar(
            MEEndOfStream, GUID_NULL, S_OK, NULL);

        if (FAILED(hr))
        {
            goto done;
        }

        // Notify the source. It will send the end-of-presentation event.
        hr = m_pSource->RequestSample();
        if (FAILED(hr))
        {
            goto done;
        }
    }
    else if (NeedsData())
    {
        // The sample queue is empty; the request queue is not empty; and we
        // have not reached the end of the stream. Ask for more data.
        hr = m_pSource->RequestSample();
        if (FAILED(hr))
        {
            goto done;
        }
    } else {
        hr = S_OK;
        goto done;
    }

done:
    if (FAILED(hr) && (m_state != STATE_SHUTDOWN))
    {
        // An error occurred. Send an MEError event from the source,
        // unless the source is already shut down.
        m_pSource->QueueEvent(MEError, GUID_NULL, hr, NULL);
    }

    SafeRelease(&pSample);
    SafeRelease(&pToken);
    return S_OK;
}
Example #15
HRESULT MPEG1Stream::DispatchSamples()
{
    HRESULT hr = S_OK;
    BOOL bNeedData = FALSE;
    BOOL bEOS = FALSE;

    IMFSample *pSample = NULL;
    IUnknown  *pToken = NULL;

    SourceLock lock(m_pSource);

    // It's possible that an I/O request completed after the source
    // paused, stopped, or shut down. We should not deliver any samples
    // unless the source is running.
    if (m_state != STATE_STARTED)
    {
        hr = S_OK;
        goto done;
    }

    // Deliver as many samples as we can.
    while (!m_Samples.IsEmpty() && !m_Requests.IsEmpty())
    {
        // Pull the next sample from the queue.
        CHECK_HR(hr = m_Samples.RemoveFront(&pSample));

        // Pull the next request token from the queue. Tokens can be NULL.
        CHECK_HR(hr = m_Requests.RemoveFront(&pToken));

        if (pToken)
        {
            CHECK_HR(hr = pSample->SetUnknown(MFSampleExtension_Token, pToken));
        }

        CHECK_HR(hr = m_pEventQueue->QueueEventParamUnk(MEMediaSample, GUID_NULL, S_OK, pSample)); 

        SAFE_RELEASE(pSample);
        SAFE_RELEASE(pToken);
    }

    if (m_Samples.IsEmpty() && m_bEOS)
    {
        // The sample queue is empty AND we have reached the end of the source stream.
        // Notify the pipeline by sending the end-of-stream event.
        CHECK_HR(hr = m_pEventQueue->QueueEventParamVar(MEEndOfStream, GUID_NULL, S_OK, NULL));

        // Also notify the source, so that it can send the end-of-presentation event.
        CHECK_HR(hr = m_pSource->QueueAsyncOperation(SourceOp::OP_END_OF_STREAM));
    }
    else if (NeedsData())
    {
        // The sample queue is empty and the request queue is not empty (and we did not
        // reach the end of the stream). Ask the source for more data.
        CHECK_HR(hr = m_pSource->QueueAsyncOperation(SourceOp::OP_REQUEST_DATA));
    }

done:

    // If there was an error, queue MEError from the source (except after shutdown).
    if (FAILED(hr) && (m_state != STATE_SHUTDOWN))
    {
        m_pSource->QueueEvent(MEError, GUID_NULL, hr, NULL);
    }

    SAFE_RELEASE(pSample);
    SAFE_RELEASE(pToken);
    return S_OK;
}
Example #16
HRESULT D3DPresentEngine::CreateVideoSamples(
    IMFMediaType *pFormat, 
    VideoSampleList& videoSampleQueue
    )
{
    if (m_hwnd == NULL)
    {
        return MF_E_INVALIDREQUEST;
    }

    if (pFormat == NULL)
    {
        return MF_E_UNEXPECTED;
    }

	HRESULT hr = S_OK;
	D3DPRESENT_PARAMETERS pp;

    IDirect3DSwapChain9 *pSwapChain = NULL;    // Swap chain
	IMFSample *pVideoSample = NULL;            // Sample
	
    AutoLock lock(m_ObjectLock);

    ReleaseResources();

    // Get the swap chain parameters from the media type.
    CHECK_HR(hr = GetSwapChainPresentParameters(pFormat, &pp));

	if(m_pRenderSurface)
	{
		SAFE_RELEASE(m_pRenderSurface);
	}

	// Create the video samples.
    for (int i = 0; i < m_bufferCount; i++)
    {
        // Create a new swap chain.
        CHECK_HR(hr = m_pDevice->CreateAdditionalSwapChain(&pp, &pSwapChain));
        
        // Create the video sample from the swap chain.
        CHECK_HR(hr = CreateD3DSample(pSwapChain, &pVideoSample));

        // Add it to the list.
		CHECK_HR(hr = videoSampleQueue.InsertBack(pVideoSample));

        // Set the swap chain pointer as a custom attribute on the sample. This keeps
        // a reference count on the swap chain, so that the swap chain is kept alive
        // for the duration of the sample's lifetime.
        CHECK_HR(hr = pVideoSample->SetUnknown(MFSamplePresenter_SampleSwapChain, pSwapChain));

    	SAFE_RELEASE(pVideoSample);
        SAFE_RELEASE(pSwapChain);
    }

	// Let the derived class create any additional D3D resources that it needs.
    CHECK_HR(hr = OnCreateVideoSamples(pp));

done:
    if (FAILED(hr))
    {
        ReleaseResources();
    }
		
	SAFE_RELEASE(pSwapChain);
	SAFE_RELEASE(pVideoSample);
    return hr;
}
Example #17
STDMETHODIMP CDecWMV9MFT::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity, IMediaSample *pMediaSample)
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  hr = m_pMFT->GetInputStatus(0, &dwStatus);
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> GetInputStatus() failed with hr: 0x%x", hr));
    return S_FALSE;
  }
  if (!(dwStatus & MFT_INPUT_STATUS_ACCEPT_DATA))
    return S_FALSE;

  if (m_vc1Header && (m_bManualReorder || m_bNeedKeyFrame)) {
    AVPictureType pictype = m_vc1Header->ParseVC1PictureType(buffer, buflen);
    if (m_bManualReorder) {
      if (pictype == AV_PICTURE_TYPE_I || pictype == AV_PICTURE_TYPE_P) {
        if (m_bReorderBufferValid)
          m_timestampQueue.push(m_rtReorderBuffer);
        m_rtReorderBuffer = rtStart;
        m_bReorderBufferValid = TRUE;
      } else {
        m_timestampQueue.push(rtStart);
      }
    }

    if (m_bNeedKeyFrame) {
      if (pictype != AV_PICTURE_TYPE_I) {
        if (m_bManualReorder)
          m_timestampQueue.pop();
        return S_OK;
      } else {
        m_bNeedKeyFrame = FALSE;
        bSyncPoint = TRUE;
      }
    }
  }

  IMFSample *pSample = nullptr;
  hr = MF.CreateSample(&pSample);
  if (FAILED(hr)) {
    DbgLog((LOG_ERROR, 10, L"Unable to allocate MF Sample, hr: 0x%x", hr));
    return E_FAIL;
  }
  
  IMFMediaBuffer *pMFBuffer = CreateMediaBuffer(buffer, buflen);
  if (!pMFBuffer) {
    DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer"));
    SafeRelease(&pSample);
    return E_FAIL;
  }

  pSample->AddBuffer(pMFBuffer);

  if (rtStart != AV_NOPTS_VALUE) {
    pSample->SetSampleTime(rtStart);
    if (rtStop != AV_NOPTS_VALUE && rtStop > (rtStart - 1))
      pSample->SetSampleDuration(rtStop - rtStart);
  }

  pSample->SetUINT32(MFSampleExtension_CleanPoint,    bSyncPoint);
  pSample->SetUINT32(MFSampleExtension_Discontinuity, bDiscontinuity);

  hr = m_pMFT->ProcessInput(0, pSample, 0);

  if (hr == MF_E_NOTACCEPTING) {
    // Not accepting data right now, try to process output and try again
    ProcessOutput();
    hr = m_pMFT->ProcessInput(0, pSample, 0);
  }

  SafeRelease(&pMFBuffer);
  SafeRelease(&pSample);

  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessInput failed with hr: 0x%x", hr));
    return E_FAIL;
  }

  return ProcessOutput();
}
Example #18
STDMETHODIMP CDecWMV9MFT::ProcessOutput()
{
  HRESULT hr = S_OK;
  DWORD dwStatus = 0;

  MFT_OUTPUT_STREAM_INFO outputInfo = {0};
  m_pMFT->GetOutputStreamInfo(0, &outputInfo);

  IMFMediaBuffer *pMFBuffer = nullptr;
  ASSERT(!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES));

  MFT_OUTPUT_DATA_BUFFER OutputBuffer = {0};
  if (!(outputInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) {
    pMFBuffer = GetBuffer(outputInfo.cbSize);
    if (!pMFBuffer) { DbgLog((LOG_TRACE, 10, L"Unable to allocate media buffer")); return E_FAIL; }
  
    IMFSample *pSampleOut = nullptr;
    hr = MF.CreateSample(&pSampleOut);
    if (FAILED(hr)) { DbgLog((LOG_TRACE, 10, L"Unable to allocate MF sample, hr: 0x%x", hr)); ReleaseBuffer(pMFBuffer); return E_FAIL; }
    
    pSampleOut->AddBuffer(pMFBuffer);
    OutputBuffer.pSample = pSampleOut;
  }
  hr = m_pMFT->ProcessOutput(0, 1, &OutputBuffer, &dwStatus);

  // We don't process events, just release them
  SafeRelease(&OutputBuffer.pEvents);

  // handle stream format changes
  if (hr == MF_E_TRANSFORM_STREAM_CHANGE || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE ) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    hr = SelectOutputType();
    if (FAILED(hr)) {
      DbgLog((LOG_TRACE, 10, L"-> Failed to handle stream change, hr: %x", hr));
      return E_FAIL;
    }
    // try again with the new type, it should work now!
    return ProcessOutput();
  }
  
  // the MFT generated no output, discard the sample and return
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT || OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE) {
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return S_FALSE;
  }
  
  // unknown error condition
  if (FAILED(hr)) {
    DbgLog((LOG_TRACE, 10, L"-> ProcessOutput failed with hr: %x", hr));
    SafeRelease(&OutputBuffer.pSample);
    ReleaseBuffer(pMFBuffer);
    return E_FAIL;
  }

  LAVFrame *pFrame = nullptr;
  AllocateFrame(&pFrame);

  IMFMediaType *pMTOut = nullptr;
  m_pMFT->GetOutputCurrentType(0, &pMTOut);

  MFGetAttributeSize(pMTOut, MF_MT_FRAME_SIZE, (UINT32 *)&pFrame->width, (UINT32 *)&pFrame->height);
  pFrame->format = m_OutPixFmt;

  AVRational pixel_aspect_ratio = {1, 1};
  MFGetAttributeRatio(pMTOut, MF_MT_PIXEL_ASPECT_RATIO, (UINT32*)&pixel_aspect_ratio.num, (UINT32*)&pixel_aspect_ratio.den);

  AVRational display_aspect_ratio = {0, 0};
  av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den, (int64_t)pixel_aspect_ratio.num * pFrame->width, (int64_t)pixel_aspect_ratio.den * pFrame->height, INT_MAX);
  pFrame->aspect_ratio = display_aspect_ratio;

  pFrame->interlaced = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_Interlaced,       FALSE);
  pFrame->repeat     = MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_RepeatFirstField, FALSE);

  LAVDeintFieldOrder fo = m_pSettings->GetDeintFieldOrder();
  pFrame->tff = (fo == DeintFieldOrder_Auto) ? !MFGetAttributeUINT32(OutputBuffer.pSample, MFSampleExtension_BottomFieldFirst, FALSE) : (fo == DeintFieldOrder_TopFieldFirst);

  if (pFrame->interlaced && !m_bInterlaced)
    m_bInterlaced = TRUE;

  pFrame->interlaced = (pFrame->interlaced || (m_bInterlaced && m_pSettings->GetDeinterlacingMode() == DeintMode_Aggressive) || m_pSettings->GetDeinterlacingMode() == DeintMode_Force) && !(m_pSettings->GetDeinterlacingMode() == DeintMode_Disable);

  pFrame->ext_format.VideoPrimaries         = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_PRIMARIES,     MFVideoPrimaries_Unknown);
  pFrame->ext_format.VideoTransferFunction  = MFGetAttributeUINT32(pMTOut, MF_MT_TRANSFER_FUNCTION,   MFVideoTransFunc_Unknown);
  pFrame->ext_format.VideoTransferMatrix    = MFGetAttributeUINT32(pMTOut, MF_MT_YUV_MATRIX,          MFVideoTransferMatrix_Unknown);
  pFrame->ext_format.VideoChromaSubsampling = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
  pFrame->ext_format.NominalRange           = MFGetAttributeUINT32(pMTOut, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);

  // HACK: don't flag range=limited if it's the only value set, since it's also the implied default; this helps to avoid a reconnect.
  // The MFT always sets this value, even if the bitstream says nothing about it, causing a reconnect on every vc1/wmv3 file.
  if (pFrame->ext_format.value == 0x2000)
    pFrame->ext_format.value = 0;

  // Timestamps
  if (m_bManualReorder) {
    if (!m_timestampQueue.empty()) {
      pFrame->rtStart = m_timestampQueue.front();
      m_timestampQueue.pop();
      
      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  } else {
    LONGLONG llTimestamp = 0;
    hr = OutputBuffer.pSample->GetSampleTime(&llTimestamp);
    if (SUCCEEDED(hr)) {
      pFrame->rtStart = llTimestamp;
      
      LONGLONG llDuration = 0;
      hr = OutputBuffer.pSample->GetSampleDuration(&llDuration);
      if (SUCCEEDED(hr) && llDuration > 0) {
        pFrame->rtStop = pFrame->rtStart + llDuration;
      }
    }
  }

  SafeRelease(&pMTOut);

  // Lock memory in the buffer
  BYTE *pBuffer = nullptr;
  pMFBuffer->Lock(&pBuffer, NULL, NULL);

  // Check alignment
  // If not properly aligned, we need to make the data aligned.
  int alignment = (m_OutPixFmt == LAVPixFmt_NV12) ? 16 : 32;
  if ((pFrame->width % alignment) != 0) {
    hr = AllocLAVFrameBuffers(pFrame);
    if (FAILED(hr)) {
      pMFBuffer->Unlock();
      ReleaseBuffer(pMFBuffer);
      SafeRelease(&OutputBuffer.pSample);
      return hr;
    }
    size_t ySize = pFrame->width * pFrame->height;
    
    memcpy_plane(pFrame->data[0], pBuffer, pFrame->width, pFrame->stride[0], pFrame->height);
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      memcpy_plane(pFrame->data[1], pBuffer + ySize, pFrame->width, pFrame->stride[1], pFrame->height / 2);
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      size_t uvSize = ySize / 4;
      memcpy_plane(pFrame->data[2], pBuffer + ySize, pFrame->width / 2, pFrame->stride[2], pFrame->height / 2);
      memcpy_plane(pFrame->data[1], pBuffer + ySize + uvSize, pFrame->width / 2, pFrame->stride[1], pFrame->height / 2);
    }
    pMFBuffer->Unlock();
    ReleaseBuffer(pMFBuffer);
  } else {
    if (m_OutPixFmt == LAVPixFmt_NV12) {
      pFrame->data[0] = pBuffer;
      pFrame->data[1] = pBuffer + pFrame->width * pFrame->height;
      pFrame->stride[0] = pFrame->stride[1] = pFrame->width;
    } else if (m_OutPixFmt == LAVPixFmt_YUV420) {
      pFrame->data[0] = pBuffer;
      pFrame->data[2] = pBuffer + pFrame->width * pFrame->height;
      pFrame->data[1] = pFrame->data[2] + (pFrame->width / 2) * (pFrame->height / 2);
      pFrame->stride[0] = pFrame->width;
      pFrame->stride[1] = pFrame->stride[2] = pFrame->width / 2;
    }
    pFrame->data[3] = (BYTE *)pMFBuffer;
    pFrame->destruct = wmv9_buffer_destruct;
    pFrame->priv_data = this;
  }
  pFrame->flags |= LAV_FRAME_FLAG_BUFFER_MODIFY;
  Deliver(pFrame);

  SafeRelease(&OutputBuffer.pSample);

  if (OutputBuffer.dwStatus == MFT_OUTPUT_DATA_BUFFER_INCOMPLETE)
    return ProcessOutput();
  return hr;
}
Example #19
void VideoCompressor::AudioSample32Bit2Channel(float *Samples, UINT FrameCount, UINT64 CaptureStartTime)
{
    //double TimeInSeconds = _Clock->Elapsed();

    const UINT SamplesPerSecond = 44100;
    const UINT ChannelCount = 2;
    const UINT SampleCount = FrameCount * ChannelCount;
    const UINT BitsPerSample = 16;
    const UINT BufferLength = BitsPerSample / 8 * ChannelCount * FrameCount;
    const LONGLONG SampleDuration = LONGLONG(FrameCount) * LONGLONG(10000000) / SamplesPerSecond; // in hns
    
    //
    // Write some data
    //
    IMFSample *spSample;
    IMFMediaBuffer *spBuffer;
    BYTE *pbBuffer = NULL;

    //
    // Create a media sample
    //

    HRESULT hr = MFCreateSample( &spSample );
    hr = spSample->SetSampleDuration( SampleDuration );
    
    //hr = spSample->SetSampleTime( LONGLONG( TimeInSeconds * 10000000.0 ) );
    
    //CaptureStartTime = 10,000,000 * t / f;
    //t = CaptureStartTime * f / 10,000,000
    LONGLONG FileStartCounter = _Clock->StartTime();
    LONGLONG CaptureStartCounter = CaptureStartTime * _Clock->TicksPerSecond() / LONGLONG(10000000);
    hr = spSample->SetSampleTime( ( CaptureStartCounter - FileStartCounter ) * LONGLONG(10000000) / _Clock->TicksPerSecond() );

    //
    // Add a media buffer and fill it with the converted 16-bit samples
    //

    hr = MFCreateMemoryBuffer( BufferLength, &spBuffer );
    hr = spBuffer->SetCurrentLength( BufferLength );
    hr = spSample->AddBuffer( spBuffer );

    hr = spBuffer->Lock( &pbBuffer, NULL, NULL );
    __int16 *OutputAudioBuffer = (__int16 *)pbBuffer;
    for(UINT SampleIndex = 0; SampleIndex < SampleCount; SampleIndex++)
    {
        //
        // Floats are in the range -1 to 1; clamp so that +1.0 doesn't
        // overflow the signed 16-bit range
        //
        int Value = int(Samples[SampleIndex] * 32768.0f);
        if(Value > 32767)  Value = 32767;
        if(Value < -32768) Value = -32768;
        OutputAudioBuffer[SampleIndex] = (__int16)Value;
    }
    hr = spBuffer->Unlock();

    //
    // Write the media sample
    //
    hr = _Writer->WriteSample( 1, spSample );
    PersistentAssert(SUCCEEDED(hr), "WriteSample failed");

    spSample->Release();
    spBuffer->Release();
}
Example #20
	virtual void doGetNextFrame()
	{
		if (!_isInitialised)
		{
			_isInitialised = true;
			if (!initialise())
			{
				printf("Video device initialisation failed, stopping.");
				return;
			}
		}

		if (!isCurrentlyAwaitingData()) return;

		DWORD processOutputStatus = 0;
		IMFSample *videoSample = NULL;
		DWORD streamIndex, flags;
		LONGLONG llVideoTimeStamp, llSampleDuration;
		HRESULT mftProcessInput = S_OK;
		HRESULT mftProcessOutput = S_OK;
		MFT_OUTPUT_STREAM_INFO StreamInfo;
		IMFMediaBuffer *pBuffer = NULL;
		IMFSample *mftOutSample = NULL;
		DWORD mftOutFlags;
		bool frameSent = false;

		CHECK_HR(_videoReader->ReadSample(
			MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			0,                              // Flags.
			&streamIndex,                   // Receives the actual stream index. 
			&flags,                         // Receives status flags.
			&llVideoTimeStamp,              // Receives the time stamp.
			&videoSample                    // Receives the sample or NULL.
			), "Error reading video sample.");

		if (videoSample)
		{
			_frameCount++;

			CHECK_HR(videoSample->SetSampleTime(llVideoTimeStamp), "Error setting the video sample time.\n");
			CHECK_HR(videoSample->GetSampleDuration(&llSampleDuration), "Error getting video sample duration.\n");

			// Pass the video sample to the H.264 transform.

			CHECK_HR(_pTransform->ProcessInput(0, videoSample, 0), "The resampler H264 ProcessInput call failed.\n");

			CHECK_HR(_pTransform->GetOutputStatus(&mftOutFlags), "H264 MFT GetOutputStatus failed.\n");

			if (mftOutFlags == MFT_OUTPUT_STATUS_SAMPLE_READY)
			{
				printf("Sample ready.\n");

				CHECK_HR(_pTransform->GetOutputStreamInfo(0, &StreamInfo), "Failed to get output stream info from H264 MFT.\n");

				CHECK_HR(MFCreateSample(&mftOutSample), "Failed to create MF sample.\n");
				CHECK_HR(MFCreateMemoryBuffer(StreamInfo.cbSize, &pBuffer), "Failed to create memory buffer.\n");
				CHECK_HR(mftOutSample->AddBuffer(pBuffer), "Failed to add buffer to the MF sample.\n");

				while (true)
				{
					_outputDataBuffer.dwStreamID = 0;
					_outputDataBuffer.dwStatus = 0;
					_outputDataBuffer.pEvents = NULL;
					_outputDataBuffer.pSample = mftOutSample;

					mftProcessOutput = _pTransform->ProcessOutput(0, 1, &_outputDataBuffer, &processOutputStatus);

					if (mftProcessOutput != MF_E_TRANSFORM_NEED_MORE_INPUT)
					{
						CHECK_HR(_outputDataBuffer.pSample->SetSampleTime(llVideoTimeStamp), "Error setting MFT sample time.\n");
						CHECK_HR(_outputDataBuffer.pSample->SetSampleDuration(llSampleDuration), "Error setting MFT sample duration.\n");

						IMFMediaBuffer *buf = NULL;
						DWORD bufLength;
						CHECK_HR(_outputDataBuffer.pSample->ConvertToContiguousBuffer(&buf), "ConvertToContiguousBuffer failed.\n");
						CHECK_HR(buf->GetCurrentLength(&bufLength), "Get buffer length failed.\n");
						BYTE * rawBuffer = NULL;

						auto now = GetTickCount();

						printf("Writing sample %i, spacing %I64dms, sample time %I64d, sample duration %I64d, sample size %i.\n", _frameCount, now - _lastSendAt, llVideoTimeStamp, llSampleDuration, bufLength);

						fFrameSize = bufLength;
						fDurationInMicroseconds = 0;
						gettimeofday(&fPresentationTime, NULL);

						buf->Lock(&rawBuffer, NULL, NULL);
						memmove(fTo, rawBuffer, fFrameSize);

						FramedSource::afterGetting(this);

						buf->Unlock();
						SafeRelease(&buf);

						frameSent = true;
						_lastSendAt = GetTickCount();
					}

					SafeRelease(&pBuffer);
					SafeRelease(&mftOutSample);

					break;
				}
			}
			else {
				printf("No sample.\n");
			}

			SafeRelease(&videoSample);
		}

		if (!frameSent)
		{
			envir().taskScheduler().triggerEvent(eventTriggerId, this);
		}

		return;

	done:

		printf("MediaFoundationH264LiveSource doGetNextFrame failed.\n");
	}
Example #21
// Process the incoming NAL from the queue: wraps it up into an
// IMFMediaSample, sends it to the decoder.
//
// Thread context: decoder thread
bool DecoderMF::DoProcessInputNAL(IBMDStreamingH264NALPacket* nalPacket)
{
	bool				ret = false;
	HRESULT				hr;
	IMFMediaBuffer*		newBuffer = NULL;
	BYTE*				newBufferPtr;
	void*				nalPacketPtr;
	//
	IMFSample*			newSample = NULL;
	ULONGLONG			nalPresentationTime;
	const BYTE			nalPrefix[] = {0, 0, 0, 1};

	// Get a pointer to the NAL data
	hr = nalPacket->GetBytes(&nalPacketPtr);
	if (FAILED(hr))
		goto bail;

	// Create the MF media buffer, + 4 bytes for the NAL prefix
	// (0x00 0x00 0x00 0x01) which MF requires.
	hr = MFCreateMemoryBuffer(nalPacket->GetPayloadSize()+4, &newBuffer);
	if (FAILED(hr))
		goto bail;

	// Lock the MF media buffer
	hr = newBuffer->Lock(&newBufferPtr, NULL, NULL);
	if (FAILED(hr))
		goto bail;

	// Copy the prefix and the data
	memcpy(newBufferPtr, nalPrefix, 4);
	memcpy(newBufferPtr+4, nalPacketPtr, nalPacket->GetPayloadSize());

	// Unlock the MF media buffer
	hr = newBuffer->Unlock();
	if (FAILED(hr))
		goto bail;

	// Update the current length of the MF media buffer
	hr = newBuffer->SetCurrentLength(nalPacket->GetPayloadSize()+4);
	if (FAILED(hr))
		goto bail;

	// We now have a IMFMediaBuffer with the contents of the NAL
	// packet. We now construct a IMFSample with the buffer
	hr = MFCreateSample(&newSample);
	if (FAILED(hr))
		goto bail;

	hr = newSample->AddBuffer(newBuffer);
	if (FAILED(hr))
		goto bail;

	// Get the presentation (display) time in 100-nanosecond units
	// TODO: this is pretty meaningless without setting the start time.
	hr = nalPacket->GetDisplayTime(1000 * 1000 * 10, &nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Set presentation time on the sample
	hr = newSample->SetSampleTime(nalPresentationTime);
	if (FAILED(hr))
		goto bail;

	// Now pass it to the decoder
	for (;;)
	{
		hr = m_h264Decoder->ProcessInput(0, newSample, 0);
		if (hr == S_OK)
			break;
		if (hr != MF_E_NOTACCEPTING || DoProcessOutput() == false)
			goto bail;
	}

	ret = true;

bail:
	if (newBuffer != NULL)
		newBuffer->Release();

	if (newSample != NULL)
		newSample->Release();

	return ret;
}
Example #22
HRESULT CHWMFT::ProcessOutput(
    DWORD                   dwFlags,
    DWORD                   dwOutputBufferCount,
    MFT_OUTPUT_DATA_BUFFER* pOutputSamples,
    DWORD*                  pdwStatus)
{
    /*****************************************
    ** See http://msdn.microsoft.com/en-us/library/ms704014(v=VS.85).aspx
    *****************************************/

    HRESULT     hr      = S_OK;
    IMFSample*  pSample = NULL;

    TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): Enter",  __FUNCTION__);

    do
    {
        if(IsLocked() != FALSE)
        {
            hr = MF_E_TRANSFORM_ASYNC_LOCKED;
            break;
        }

        {
            CAutoLock lock(&m_csLock);

            TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): HaveOutputCount: %u",  __FUNCTION__, m_dwHaveOutputCount);

            if(m_dwHaveOutputCount == 0)
            {
                // This call does not correspond to a pending METransformHaveOutput event

                hr = E_UNEXPECTED;
                break;
            }
            else
            {
                m_dwHaveOutputCount--;
            }
        }

        /*****************************************
        ** Todo: If your MFT supports more than one
        ** stream, make sure you modify
        ** MFT_MAX_STREAMS and adjust this function
        ** accordingly
        *****************************************/
        if(dwOutputBufferCount < MFT_MAX_STREAMS)
        {
            hr = E_INVALIDARG;
            break;
        }

        if(IsMFTReady() == FALSE)
        {
            hr = MF_E_TRANSFORM_TYPE_NOT_SET;
            break;
        }

        /***************************************
        ** Since this is an internal function
        ** we know m_pOutputSampleQueue can never be
        ** NULL due to InitializeTransform()
        ***************************************/
        hr = m_pOutputSampleQueue->GetNextSample(&pSample);
        if(FAILED(hr))
        {
            break;
        }

        if(pSample == NULL)
        {
            hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
            break;
        }

        /*******************************
        ** Todo: This MFT only has one
        ** input stream, so the output
        ** samples array and stream ID
        ** will only use the first
        ** member
        *******************************/
        pOutputSamples[0].dwStreamID    = 0;
        
        if((pOutputSamples[0].pSample) == NULL)
        {
            // The MFT is providing its own samples
            (pOutputSamples[0].pSample)   = pSample;
            (pOutputSamples[0].pSample)->AddRef();
        }
        else
        {
            // The pipeline has allocated the samples
            IMFMediaBuffer* pBuffer = NULL;

            do
            {
                hr = pSample->ConvertToContiguousBuffer(&pBuffer);
                if(FAILED(hr))
                {
                    break;
                }

                hr = (pOutputSamples[0].pSample)->AddBuffer(pBuffer);
                if(FAILED(hr))
                {
                    break;
                }
            }while(false);

            SAFERELEASE(pBuffer);

            if(FAILED(hr))
            {
                break;
            }
        }

        /***************************************
        ** Since this is an internal function
        ** we know m_pOutputSampleQueue can never be
        ** NULL due to InitializeTransform()
        ***************************************/
        if(m_pOutputSampleQueue->IsQueueEmpty() != FALSE)
        {
            // We're out of samples in the output queue
            CAutoLock lock(&m_csLock);

            if((m_dwStatus & MYMFT_STATUS_DRAINING) != 0)
            {
                // We're done draining, time to send the event
                IMFMediaEvent*  pDrainCompleteEvent  = NULL;

                do
                {
                    hr = MFCreateMediaEvent(METransformDrainComplete , GUID_NULL, S_OK, NULL, &pDrainCompleteEvent);
                    if(FAILED(hr))
                    {
                        break;
                    }

                    /*******************************
                    ** Todo: This MFT only has one
                    ** input stream, so the drain
                    ** is always on stream zero.
                    ** Update this if your MFT
                    ** has more than one stream
                    *******************************/
                    hr = pDrainCompleteEvent->SetUINT32(MF_EVENT_MFT_INPUT_STREAM_ID, 0);
                    if(FAILED(hr))
                    {
                        break;
                    }

                    /***************************************
                    ** Since this is an internal function
                    ** we know m_pEventQueue can never be
                    ** NULL due to InitializeTransform()
                    ***************************************/
                    hr = m_pEventQueue->QueueEvent(pDrainCompleteEvent);
                    if(FAILED(hr))
                    {
                        break;
                    }
                }while(false);

                SAFERELEASE(pDrainCompleteEvent);

                if(FAILED(hr))
                {
                    break;
                }

                m_dwStatus &= (~MYMFT_STATUS_DRAINING);
            }
        }
    }while(false);

    SAFERELEASE(pSample);

    TraceString(CHMFTTracing::TRACE_INFORMATION, L"%S(): Exit (hr=0x%x)",  __FUNCTION__, hr);

    return hr;
}