Example #1
STDMETHODIMP CDecWMV9MFT::SelectOutputType()
{
  HRESULT hr = S_OK;
  int idx = 0;

  m_OutPixFmt = LAVPixFmt_None;
  IMFMediaType *pMTOut = nullptr;
  while (SUCCEEDED(hr = m_pMFT->GetOutputAvailableType(0, idx++, &pMTOut)) && m_OutPixFmt == LAVPixFmt_None) {
    GUID outSubtype;
    if (SUCCEEDED(pMTOut->GetGUID(MF_MT_SUBTYPE, &outSubtype))) {
      if (outSubtype == MEDIASUBTYPE_NV12) {
        hr = m_pMFT->SetOutputType(0, pMTOut, 0);
        m_OutPixFmt = LAVPixFmt_NV12;
        break;
      } else if (outSubtype == MEDIASUBTYPE_YV12) {
        hr = m_pMFT->SetOutputType(0, pMTOut, 0);
        m_OutPixFmt = LAVPixFmt_YUV420;
        break;
      }
    }
    SafeRelease(&pMTOut);
  }
  SafeRelease(&pMTOut);

  return hr;
}
Example #2
//-------------------------------------------------------------------
// getVideoFormat:  Gets format information for the video stream.
//
HRESULT VidReader::getVideoFormat()
{
    HRESULT hr = S_OK;
    IMFMediaType *pType = NULL;
    GUID subtype = { 0 };

    // Get the media type from the stream.
    hr = m_pReader->GetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType);
    if (FAILED(hr)) goto done;

    // Make sure it is a video format.
    hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
    if (FAILED(hr)) goto done;

    if (subtype != MFVideoFormat_RGB32)
    {
        hr = E_UNEXPECTED;
        goto done;
    }

    // Get the width and height
    hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &m_imagewidth, &m_imageheight);
    if (FAILED(hr)) goto done;

    // Get the frame rate
    UINT32 frN, frD;
    hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &frN, &frD);
    if (FAILED(hr)) goto done;
    m_framerate = (double)frN / (double)frD;

done:
    SafeRelease(&pType);
    return hr;
}
Example #3
HRESULT CEvrCustomMixer::GetOutputAvailableType(DWORD dwIndex, IMFMediaType **ppType)
{
	HRESULT hr;
	IMFMediaType *pmt;

	*ppType = 0;
	if (dwIndex >= s_nAvailableOutputTypes)
		return MF_E_NO_MORE_TYPES;

	hr = CDynLibManager::GetInstance()->pfnMFCreateMediaType(&pmt);
	if (SUCCEEDED(hr))
	{
		hr = pmt->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		ASSERT(SUCCEEDED(hr));
		hr = pmt->SetGUID(MF_MT_SUBTYPE, s_pAvailableOutputTypes[dwIndex]);
		ASSERT(SUCCEEDED(hr));
		*ppType = pmt;

		UINT uWidth, uHeight, uNumerator, uDenominator;
		hr = MFGetAttributeSize(m_pInputTypes[0], MF_MT_FRAME_SIZE, &uWidth, &uHeight);
		ASSERT(SUCCEEDED(hr));
		hr = MFGetAttributeRatio(m_pInputTypes[0], MF_MT_PIXEL_ASPECT_RATIO, &uNumerator, &uDenominator);
		ASSERT(SUCCEEDED(hr));
		hr = MFSetAttributeSize(pmt, MF_MT_FRAME_SIZE, uWidth, uHeight);
		ASSERT(SUCCEEDED(hr));
		hr = MFSetAttributeRatio(pmt, MF_MT_PIXEL_ASPECT_RATIO, uNumerator, uDenominator);
		ASSERT(SUCCEEDED(hr));
	}
	return hr;
}
Example #4
/*
List all the media modes available on the device.
*/
void ListModes(IMFSourceReader *pReader)
{
	HRESULT hr = S_OK;
	DWORD dwMediaTypeIndex = 0;

	while (SUCCEEDED(hr))
	{
		IMFMediaType *pType = NULL;
		hr = pReader->GetNativeMediaType(0, dwMediaTypeIndex, &pType);
		if (hr == MF_E_NO_MORE_TYPES)
		{
			hr = S_OK;
			break;
		}
		else if (SUCCEEDED(hr))
		{
			// Examine the media type. (Not shown.)
			CMediaTypeTrace *nativeTypeMediaTrace = new CMediaTypeTrace(pType);
			printf("Native media type: %s.\n", nativeTypeMediaTrace->GetString());

			pType->Release();
		}
		++dwMediaTypeIndex;
	}
}
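A minimal usage sketch (not part of any of the projects above), assuming the standard Media Foundation capture-device enumeration APIs and the SafeRelease helper used in the other examples; it creates a source reader for the first video capture device and hands it to ListModes. The function name ListModesOfFirstCamera is hypothetical and error handling is omitted for brevity.

void ListModesOfFirstCamera()
{
	IMFAttributes *pAttributes = NULL;
	IMFActivate **ppDevices = NULL;
	UINT32 deviceCount = 0;

	// Ask for video capture devices only.
	MFCreateAttributes(&pAttributes, 1);
	pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
	                     MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
	MFEnumDeviceSources(pAttributes, &ppDevices, &deviceCount);

	if (deviceCount > 0)
	{
		IMFMediaSource *pSource = NULL;
		IMFSourceReader *pReader = NULL;

		// Activate the first device and wrap it in a source reader.
		ppDevices[0]->ActivateObject(IID_PPV_ARGS(&pSource));
		MFCreateSourceReaderFromMediaSource(pSource, NULL, &pReader);

		ListModes(pReader);

		SafeRelease(&pReader);
		SafeRelease(&pSource);
	}

	for (UINT32 i = 0; i < deviceCount; i++)
	{
		ppDevices[i]->Release();
	}
	CoTaskMemFree(ppDevices);
	SafeRelease(&pAttributes);
}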
Example #5
File: capture.cpp  Project: NathanW2/escapi
int CaptureClass::scanMediaTypes(unsigned int aWidth, unsigned int aHeight)
{
	HRESULT hr;
	HRESULT nativeTypeErrorCode = S_OK;
	DWORD count = 0;
	int besterror = 0xfffffff;
	int bestfit = 0;

	while (nativeTypeErrorCode == S_OK && besterror)
	{
		IMFMediaType * nativeType = NULL;
		nativeTypeErrorCode = mReader->GetNativeMediaType(
			(DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
			count,
			&nativeType);
		ScopedRelease<IMFMediaType> nativeType_s(nativeType);

		if (nativeTypeErrorCode != S_OK) continue;

		// get the media type 
		GUID nativeGuid = { 0 };
		hr = nativeType->GetGUID(MF_MT_SUBTYPE, &nativeGuid);

		if (FAILED(hr)) return bestfit;

		if (isMediaOk(nativeType, count))
		{
			UINT32 width, height;
			hr = MFGetAttributeSize(nativeType, MF_MT_FRAME_SIZE, &width, &height);

			if (FAILED(hr)) return bestfit;

			int error = 0;

			// prefer (hugely) to get too much than too little data..

			if (aWidth < width) error += (width - aWidth);
			if (aHeight < height) error += (height - aHeight);
			if (aWidth > width) error += (aWidth - width) * 2;
			if (aHeight > height) error += (aHeight - height) * 2;

			if (aWidth == width && aHeight == height) // ..but perfect match is a perfect match
				error = 0;

			if (besterror > error)
			{
				besterror = error;
				bestfit = count;
			}
			/*
			char temp[1024];
			sprintf(temp, "%d x %d, %x:%x:%x:%x %d %d\n", width, height, nativeGuid.Data1, nativeGuid.Data2, nativeGuid.Data3, nativeGuid.Data4, bestfit == count, besterror);
			OutputDebugStringA(temp);
			*/
		}

		count++;
	}
	return bestfit;
}
Example #6
//-------------------------------------------------------------------
// selectVideoStream:  Finds the first video stream and sets the format to RGB32.
//
HRESULT VidReader::selectVideoStream()
{
    HRESULT hr = S_OK;
    IMFMediaType *pType = NULL;

    // Configure the source reader to give us progressive RGB32 frames.
    // The source reader will load the decoder if needed.

    hr = MFCreateMediaType(&pType);
    if (FAILED(hr)) goto done;

    hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (FAILED(hr)) goto done;

    hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
    if (FAILED(hr)) goto done;

    hr = m_pReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pType);
    if (FAILED(hr)) goto done;

    // Ensure the stream is selected.
    hr = m_pReader->SetStreamSelection((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, TRUE);
    if (FAILED(hr)) goto done;

done:

    SafeRelease(&pType);
    return hr;
}
Example #7
File: capture.cpp  Project: goussepi/lab
HRESULT SetMaxFrameRate(IMFMediaSource *pSource, DWORD dwTypeIndex)
{
  IMFPresentationDescriptor *pPD = NULL;
  IMFStreamDescriptor *pSD = NULL;
  IMFMediaTypeHandler *pHandler = NULL;
  IMFMediaType *pType = NULL;

  HRESULT hr = pSource->CreatePresentationDescriptor(&pPD);
  if (FAILED(hr))
  {
    goto done;
  }

  BOOL fSelected;
  hr = pPD->GetStreamDescriptorByIndex(dwTypeIndex, &fSelected, &pSD);
  if (FAILED(hr))
  {
    goto done;
  }

  hr = pSD->GetMediaTypeHandler(&pHandler);
  if (FAILED(hr))
  {
    goto done;
  }

  hr = pHandler->GetCurrentMediaType(&pType);
  if (FAILED(hr))
  {
    goto done;
  }

  // Get the maximum frame rate for the selected capture format.

  // Note: To get the minimum frame rate, use the 
  // MF_MT_FRAME_RATE_RANGE_MIN attribute instead.

  PROPVARIANT var;
  if (SUCCEEDED(pType->GetItem(MF_MT_FRAME_RATE_RANGE_MAX, &var)))
  {
    hr = pType->SetItem(MF_MT_FRAME_RATE, var);

    PropVariantClear(&var);

    if (FAILED(hr))
    {
      goto done;
    }

    hr = pHandler->SetCurrentMediaType(pType);
  }

done:
  SafeRelease(&pPD);
  SafeRelease(&pSD);
  SafeRelease(&pHandler);
  SafeRelease(&pType);
  return hr;
}
Example #8
// Creates the sink writer. Returns the stream index.
HRESULT VideoEncoder::CreateSinkWriter(IMFSinkWriter** ppSinkWriter, DWORD* pStreamIndex)
{
	HRESULT hr = S_OK;
	if (this->m_outputFile == L"")
	{
		return ERROR_FILE_INVALID;
	}

	// Create the sink writer.
	*ppSinkWriter = nullptr;	
	IMFSinkWriter *pSinkWriter = nullptr;
	IMFMediaType* pOutputMediaType = nullptr;
	IMFMediaType *pInMediaType = nullptr;   
	CheckHR(MFCreateSinkWriterFromURL(this->m_outputFile.c_str(), nullptr, nullptr, &pSinkWriter));

	// Create and configure the output media type.
	CheckHR(MFCreateMediaType(&pOutputMediaType));
	CheckHR(pOutputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
	CheckHR(pOutputMediaType->SetGUID(MF_MT_SUBTYPE, this->m_outputVideoFormat));
	CheckHR(pOutputMediaType->SetUINT32(MF_MT_AVG_BITRATE, this->m_videoBitRate));
	CheckHR(pOutputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));

	CheckHR(MFSetAttributeSize(pOutputMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight));
	CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1));
	CheckHR(MFSetAttributeRatio(pOutputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
	DWORD streamIndex;
	CheckHR(pSinkWriter->AddStream(pOutputMediaType, &streamIndex));

	// Set the input media type.
    CheckHR(MFCreateMediaType(&pInMediaType));   
    CheckHR(pInMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));   
    CheckHR(pInMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));     
    CheckHR(pInMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)); 

	// The input stride is not required by every encoder, but some codecs, such as H.264, need it.
	// If the stride is omitted or set to a negative value, H.264 will process the image bottom-up.
	CheckHR(pInMediaType->SetUINT32(MF_MT_DEFAULT_STRIDE, this->m_frameStride));
    CheckHR(MFSetAttributeSize(pInMediaType, MF_MT_FRAME_SIZE, this->m_frameWidth, this->m_frameHeight));
    CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_FRAME_RATE, (UINT32)this->m_fps, 1));   
    CheckHR(MFSetAttributeRatio(pInMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
    CheckHR(pSinkWriter->SetInputMediaType(streamIndex, pInMediaType, nullptr));   

	// Begin writing.
	CheckHR(pSinkWriter->BeginWriting());

	*ppSinkWriter = pSinkWriter;
	(*ppSinkWriter)->AddRef();
	*pStreamIndex = streamIndex;

cleanup:
	if (!SUCCEEDED(hr))
	{
		DWORD error = GetLastError();
		this->m_logFileStream << "Unexpected error: " << error << endl;
	}
	SafeRelease(&pSinkWriter);
	SafeRelease(&pOutputMediaType);
	SafeRelease(&pInMediaType);
	return hr;
}
Example #9
HRESULT ConfigureVideoEncoding(IMFCaptureSource *pSource, IMFCaptureRecordSink *pRecord, REFGUID guidEncodingType)
{
    IMFMediaType *pMediaType = NULL;
    IMFMediaType *pMediaType2 = NULL;
    GUID guidSubType = GUID_NULL;

    // Configure the video format for the recording sink.
    HRESULT hr = pSource->GetCurrentDeviceMediaType((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD , &pMediaType);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CloneVideoMediaType(pMediaType, guidEncodingType, &pMediaType2);
    if (FAILED(hr))
    {
        goto done;
    }


    hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &guidSubType);
    if(FAILED(hr))
    {
        goto done;
    }

    if(guidSubType == MFVideoFormat_H264_ES || guidSubType == MFVideoFormat_H264)
    {
        //When the webcam supports H264_ES or H264, we just pass the stream through. The output from the capture engine is then the same as the native type supported by the webcam.
        hr = pMediaType2->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
    }
    else
    {    
        UINT32 uiEncodingBitrate;
        hr = GetEncodingBitrate(pMediaType2, &uiEncodingBitrate);
        if (FAILED(hr))
        {
            goto done;
        }

        hr = pMediaType2->SetUINT32(MF_MT_AVG_BITRATE, uiEncodingBitrate);
    }

    if (FAILED(hr))
    {
        goto done;
    }

    // Connect the video stream to the recording sink.
    DWORD dwSinkStreamIndex;
    hr = pRecord->AddStream((DWORD)MF_CAPTURE_ENGINE_PREFERRED_SOURCE_STREAM_FOR_VIDEO_RECORD, pMediaType2, NULL, &dwSinkStreamIndex);

done:
    SafeRelease(&pMediaType);
    SafeRelease(&pMediaType2);
    return hr;
}
Example #10
// This is called from the DoProcessOutput method if the stream format
// has changed.
//
// Thread context: decoder thread
bool DecoderMF::HandleStreamChange()
{
	bool			ret = false;
	HRESULT			hr;
	DWORD			numOutputStreams;
	IMFMediaType*	mediaType = NULL;
	AM_MEDIA_TYPE*  mformt = NULL;

	hr = m_h264Decoder->GetStreamCount(NULL, &numOutputStreams);
	if (FAILED(hr))
		goto bail;

	if (numOutputStreams != 1)
		goto bail;

	hr = S_OK;
	int idx = 0;
	while (hr == S_OK)
	{
		hr = m_h264Decoder->GetOutputAvailableType(0, idx, &mediaType);
		if (FAILED(hr))
			goto bail;

		mediaType->GetRepresentation(FORMAT_MFVideoFormat , (LPVOID*)&mformt);
	    MFVIDEOFORMAT* z = (MFVIDEOFORMAT*)mformt->pbFormat;
		unsigned int format = z->surfaceInfo.Format;
		mediaType->FreeRepresentation(FORMAT_MFVideoFormat ,(LPVOID)mformt);

		if (format == '2YUY')
			break;

		mediaType->Release();
		mediaType = NULL;
		++idx;
	}

	hr = m_h264Decoder->SetOutputType(0, mediaType, 0);
	if (FAILED(hr))
		goto bail;

	if (! CreateOutputSample())
		goto bail;

	if (m_previewWindow != NULL)
	{
		if (! m_previewWindow->SetMediaType(mediaType))
			goto bail;
		m_previewConfigured = true;
	}

	ret = true;

bail:
	if (mediaType != NULL)
		mediaType->Release();

	return ret;
}
Example #11
HRESULT CloneVideoMediaType(IMFMediaType *pSrcMediaType, REFGUID guidSubType, IMFMediaType **ppNewMediaType)
{
    IMFMediaType *pNewMediaType = NULL;

    HRESULT hr = MFCreateMediaType(&pNewMediaType);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = pNewMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);     
    if (FAILED(hr))
    {
        goto done;
    }

    hr = pNewMediaType->SetGUID(MF_MT_SUBTYPE, guidSubType);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CopyAttribute(pSrcMediaType, pNewMediaType, MF_MT_FRAME_SIZE);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CopyAttribute(pSrcMediaType, pNewMediaType, MF_MT_FRAME_RATE);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CopyAttribute(pSrcMediaType, pNewMediaType, MF_MT_PIXEL_ASPECT_RATIO);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CopyAttribute(pSrcMediaType, pNewMediaType, MF_MT_INTERLACE_MODE);
    if (FAILED(hr))
    {
        goto done;
    }

    *ppNewMediaType = pNewMediaType;
    (*ppNewMediaType)->AddRef();

done:
    SafeRelease(&pNewMediaType);
    return hr;
}
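Several of the examples here (CloneVideoMediaType, CreatePhotoMediaType, ConfigureEncoder) call a CopyAttribute helper that is not listed. A common implementation, along the lines of the one in the Microsoft Media Foundation samples, copies a single attribute from one attribute store to another; since IMFMediaType derives from IMFAttributes, media types can be passed directly:

HRESULT CopyAttribute(IMFAttributes *pSrc, IMFAttributes *pDest, const GUID& key)
{
    PROPVARIANT var;
    PropVariantInit(&var);

    // Read the attribute from the source; if it exists, write it to the destination.
    HRESULT hr = pSrc->GetItem(key, &var);
    if (SUCCEEDED(hr))
    {
        hr = pDest->SetItem(key, var);
        PropVariantClear(&var);
    }
    return hr;
}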
Example #12
int select_output_mediatype(IMFTransform *transform, int out_stream_id, GUID audio_format) {
	HRESULT hr = S_OK;
	UINT32 tmp;
	IMFMediaType *t;

	// If you know what you need and what you are doing, you can specify the condition instead of searching
	// but it's better to use search since MFT may or may not support your output parameters
	for (DWORD i = 0; ; i++)
	{
		hr = transform->GetOutputAvailableType(out_stream_id, i, &t);
		if (hr == MF_E_NO_MORE_TYPES || hr == E_NOTIMPL)
		{
			return 0;
		}
		if (FAILED(hr))
		{
			ReportError(L"failed to get output types for MFT", hr);
			return -1;
		}

		hr = t->GetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, &tmp);
		if (FAILED(hr))
		{
			t->Release();
			continue;
		}

		// select the 16-bit PCM format
		if (tmp == 16)
		{
			hr = transform->SetOutputType(out_stream_id, t, 0);
			t->Release();
			if (FAILED(hr))
			{
				ReportError(L"failed to select output types for MFT", hr);
				return -1;
			}
			return 0;
		}

		t->Release();
	}

	ReportError(L"MFT: Unable to find preferred output format", E_NOTIMPL);
	return -1;
}
Example #13
/*
Finds a native media mode on the device that matches the requested subtype and frame size.
*/
void FindVideoMode(IMFSourceReader *pReader, const GUID mediaSubType, int width, int height, /* out */ IMFMediaType *&foundpType)
{
	HRESULT hr = S_OK;
	DWORD dwMediaTypeIndex = 0;

	while (SUCCEEDED(hr))
	{
		IMFMediaType *pType = NULL;
		hr = pReader->GetNativeMediaType(0, dwMediaTypeIndex, &pType);
		if (hr == MF_E_NO_MORE_TYPES)
		{
			hr = S_OK;
			break;
		}
		else if (SUCCEEDED(hr))
		{
			// Examine the media type. (Not shown.)
			/*CMediaTypeTrace *nativeTypeMediaTrace = new CMediaTypeTrace(pType);
			printf("Native media type: %s.\n", nativeTypeMediaTrace->GetString());*/

			GUID videoSubType;
			UINT32 pWidth = 0, pHeight = 0;

			hr = pType->GetGUID(MF_MT_SUBTYPE, &videoSubType);
			MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &pWidth, &pHeight);

			if (SUCCEEDED(hr))
			{
				//printf("Video subtype %s, width=%i, height=%i.\n", STRING_FROM_GUID(videoSubType), pWidth, pHeight);

				if (videoSubType == mediaSubType && pWidth == width && pHeight == height)
				{
					foundpType = pType;
					printf("Media type successfully located.\n");
					break;
				}
			}

			pType->Release();
		}
		++dwMediaTypeIndex;
	}
}
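A short, hypothetical usage sketch (not from the original source): look for a 640x480 YUY2 native mode with FindVideoMode and make it the reader's current type.

	IMFMediaType *pFoundType = NULL;
	FindVideoMode(pReader, MFVideoFormat_YUY2, 640, 480, pFoundType);

	if (pFoundType != NULL)
	{
		// Apply the located native mode to the first video stream.
		pReader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, pFoundType);
		pFoundType->Release();
	}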
Example #14
IMFMediaType* MFDecoderSourceReader::setSource(IMFMediaSource *source, const QAudioFormat &audioFormat)
{
    IMFMediaType *mediaType = NULL;
    if (m_source == source)
        return mediaType;
    if (m_source) {
        m_source->Release();
        m_source = NULL;
    }
    if (m_sourceReader) {
        m_sourceReader->Release();
        m_sourceReader = NULL;
    }
    if (!source)
        return mediaType;
    IMFAttributes *attr = NULL;
    MFCreateAttributes(&attr, 1);
    if (SUCCEEDED(attr->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, this))) {
        if (SUCCEEDED(MFCreateSourceReaderFromMediaSource(source, attr, &m_sourceReader))) {
            m_source = source;
            m_source->AddRef();
            m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_ALL_STREAMS), FALSE);
            m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
            IMFMediaType *pPartialType = NULL;
            MFCreateMediaType(&pPartialType);
            pPartialType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);

            if (audioFormat.sampleType() == QAudioFormat::Float) {
                pPartialType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float);
            } else {
                pPartialType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
            }

            m_sourceReader->SetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), NULL, pPartialType);
            pPartialType->Release();
            m_sourceReader->GetCurrentMediaType(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), &mediaType);
            // Ensure the stream is selected.
            m_sourceReader->SetStreamSelection(DWORD(MF_SOURCE_READER_FIRST_AUDIO_STREAM), TRUE);
        }
        attr->Release();
    }
    return mediaType;
}
Example #15
HRESULT CEvrCustomMixer::GetInputAvailableType(DWORD dwIndex, IMFMediaType **ppType)
{
	HRESULT hr;
	IMFMediaType *pmt;

	*ppType = 0;
	if (dwIndex >= s_nAvailableInputTypes)
		return MF_E_NO_MORE_TYPES;

	hr = CDynLibManager::GetInstance()->pfnMFCreateMediaType(&pmt);
	if (SUCCEEDED(hr))
	{
		hr = pmt->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
		ASSERT(SUCCEEDED(hr));
		hr = pmt->SetGUID(MF_MT_SUBTYPE, s_pAvailableInputTypes[dwIndex]);
		ASSERT(SUCCEEDED(hr));
		*ppType = pmt;
	}
	return hr;
}
Example #16
HRESULT CreatePhotoMediaType(IMFMediaType *pSrcMediaType, IMFMediaType **ppPhotoMediaType)
{
    *ppPhotoMediaType = NULL;

    const UINT32 uiFrameRateNumerator = 30;
    const UINT32 uiFrameRateDenominator = 1;

    IMFMediaType *pPhotoMediaType = NULL;

    HRESULT hr = MFCreateMediaType(&pPhotoMediaType);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = pPhotoMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Image);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = pPhotoMediaType->SetGUID(MF_MT_SUBTYPE, GUID_ContainerFormatJpeg);
    if (FAILED(hr))
    {
        goto done;
    }

    hr = CopyAttribute(pSrcMediaType, pPhotoMediaType, MF_MT_FRAME_SIZE);
    if (FAILED(hr))
    {
        goto done;
    }

    *ppPhotoMediaType = pPhotoMediaType;
    (*ppPhotoMediaType)->AddRef();

done:
    SafeRelease(&pPhotoMediaType);
    return hr;
}
Example #17
// Called by the constructor only (for setting up the IMFTransform)
//
// Thread context: Dialog window thread
bool DecoderMF::CreateInputMediaType(IMFMediaType** mediaType)
{
	bool			ret = false;
	HRESULT			hr;
	IMFMediaType*	newMediaType = NULL;

	if (mediaType == NULL)
		return false;

	hr = MFCreateMediaType(&newMediaType);
    if (FAILED(hr))
		goto bail;

	hr = newMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	if (FAILED(hr))
		goto bail;

	hr = newMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
	if (FAILED(hr))
		goto bail;

	ret = true;

bail:
	if (ret == false)
	{
		if (newMediaType != NULL)
		{
			newMediaType->Release();
			newMediaType = NULL;
		}
	}
	else
	{
		*mediaType = newMediaType;
	}

	return ret;
}
Example #18
File: wmfbase.cpp  Project: TimSC/wmfcam
void SetSampleMetaData(IMFSourceReader *pReader, DWORD streamIndex, PyObject *out)
{
	//Set meta data in output object
	IMFMediaType *pCurrentType = NULL;
	LONG plStride = 0;
	GUID majorType=GUID_NULL, subType=GUID_NULL;
	UINT32 width = 0;
	UINT32 height = 0;

	HRESULT hr = pReader->GetCurrentMediaType(streamIndex, &pCurrentType);
	if(!SUCCEEDED(hr)) cout << "Error 3\n";
	BOOL isComp = FALSE;
	hr = pCurrentType->IsCompressedFormat(&isComp);
	hr = pCurrentType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
	LPCWSTR typePtr = GetGUIDNameConst(majorType);
	if(!SUCCEEDED(hr)) cout << "Error 4\n";
	hr = pCurrentType->GetGUID(MF_MT_SUBTYPE, &subType);
	if(!SUCCEEDED(hr)) cout << "Error 5\n";
	int isVideo = (majorType==MFMediaType_Video);
	if(isVideo)
	{
		GetDefaultStride(pCurrentType, &plStride);
		hr = MFGetAttributeSize(pCurrentType, MF_MT_FRAME_SIZE, &width, &height);
		if(!SUCCEEDED(hr)) cout << "Error 20\n";
	}

	LPCWSTR subTypePtr = GetGUIDNameConst(subType);
	//if(subTypePtr!=0) wcout << "subtype\t" << subTypePtr << "\n";

	PyDict_SetItemStringAndDeleteVar(out, "isCompressed", PyBool_FromLong(isComp));
	if(typePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "type", PyUnicode_FromWideChar(typePtr, wcslen(typePtr)));
	if(subTypePtr!=NULL) PyDict_SetItemStringAndDeleteVar(out, "subtype", PyUnicode_FromWideChar(subTypePtr, wcslen(subTypePtr)));
	if(!isComp) PyDict_SetItemStringAndDeleteVar(out, "stride", PyInt_FromLong(plStride));
	PyDict_SetItemStringAndDeleteVar(out, "width", PyInt_FromLong(width));
	PyDict_SetItemStringAndDeleteVar(out, "height", PyInt_FromLong(height));

}
Example #19
int select_input_mediatype(IMFTransform *transform, int in_stream_id, ADTSData adts,
						UINT8 *user_data, UINT32 user_data_len, GUID audio_format) {
	HRESULT hr = S_OK;
	IMFMediaType *t;

	// actually you can get rid of the whole block of searching and filtering mess
	// if you know the exact parameters of your media stream
	hr = MFCreateMediaType(&t);
	if (FAILED(hr))
	{
		ReportError(L"Unable to create an empty MediaType", hr);
		return -1;
	}

	// basic definition
	t->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
	t->SetGUID(MF_MT_SUBTYPE, audio_format);

	// see https://docs.microsoft.com/en-us/windows/desktop/medfound/aac-decoder#example-media-types
	// and https://docs.microsoft.com/zh-cn/windows/desktop/api/mmreg/ns-mmreg-heaacwaveinfo_tag
	// for the meaning of the byte array below

	// for integrate into a larger project, it is recommended to wrap the parameters into a struct
	// and pass that struct into the function
	// const UINT8 aac_data[] = { 0x01, 0x00, 0xfe, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x11, 0x90 };
	// 0: raw aac  1: adts  2: adif  3: latm/loas
	t->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 1);
	t->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, adts.channels);
	t->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, adts.samplerate);
	// 0xfe = 254 = "unspecified"
	t->SetUINT32(MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 254);
	t->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 1);
	t->SetBlob(MF_MT_USER_DATA, user_data, user_data_len);
	hr = transform->SetInputType(in_stream_id, t, 0);
	t->Release();
	if (FAILED(hr))
	{
		ReportError(L"failed to select input types for MFT", hr);
		return -1;
	}

	return 0;
}
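The ADTSData struct passed into select_input_mediatype is not shown; only its channels and samplerate members are used above. A minimal hypothetical definition consistent with that usage (the original project presumably carries more fields parsed from the ADTS header):

struct ADTSData {
	UINT32 channels;     // channel configuration from the ADTS header
	UINT32 samplerate;   // sampling frequency in Hz
};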
Example #20
File: capture.cpp  Project: cchang326/2p5D
HRESULT CMFCamCapture::init()
{
    IMFMediaType *pType = NULL;
    HRESULT hr;

    CHECK_HR(hr = CoInitializeEx(NULL, COINIT_MULTITHREADED));
    CHECK_HR(hr = MFStartup(MF_VERSION));

    // Configure the media type that the Sample Grabber will receive.
    // Setting the major and subtype is usually enough for the topology loader
    // to resolve the topology.

    CHECK_HR(hr = MFCreateMediaType(&pType));
    CHECK_HR(hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
    CHECK_HR(hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB24));

    // Create the sample grabber sink.
    CHECK_HR(hr = SampleGrabberCB::CreateInstance(&m_spSampleGrabber));
    CHECK_HR(hr = MFCreateSampleGrabberSinkActivate(pType, m_spSampleGrabber, &m_spSinkActivate));

    // To run as fast as possible, set this attribute (requires Windows 7):
    CHECK_HR(hr = m_spSinkActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));

    // Create the Media Session.
    CHECK_HR(hr = MFCreateMediaSession(NULL, &m_spSession));

    CHECK_HR(hr = enumVideoDevices());

    CHECK_HR(hr = setupVideoDevice());

    CHECK_HR(hr = setupMfSession());
    
done:
    SafeRelease(&pType);
    return hr;
}
Example #21
HRESULT EncodeTransform::SetOutputMediaType()
{
	if (!mpEncoder)
	{
		return MF_E_NOT_INITIALIZED;
	}

	IMFMediaType* pMediaTypeOut = NULL;

	// Create and configure the output media type.
	HRESULT hr = MFCreateMediaType(&pMediaTypeOut);
	if (SUCCEEDED(hr))
	{
		hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, cVideoEncodingFormat);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, VIDEO_BIT_RATE);
	}
	if (SUCCEEDED(hr))
	{
		hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, VIDEO_FPS, 1);
	}
	if (SUCCEEDED(hr))
	{
		hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, mStreamWidth, mStreamHeight);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
	}
	if (SUCCEEDED(hr))
	{
		hr = pMediaTypeOut->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base);
	}

	if (SUCCEEDED(hr))
	{
		hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
	}
	if (SUCCEEDED(hr))
	{
		hr = mpEncoder->SetOutputType(0, pMediaTypeOut, 0);
	}

	if (pMediaTypeOut != NULL)
	{
		pMediaTypeOut->Release();
	}
	return hr;
}
Example #22
HRESULT CHWMFT::GetInputAvailableType(
    DWORD           dwInputStreamID,
    DWORD           dwTypeIndex,
    IMFMediaType**  ppType)
{
    /*****************************************
    ** Todo: This function will return a media
    ** type at a given index. The SDK 
    ** implementation uses a static array of
    ** media types. Your MFT may want to use
    ** a dynamic array and modify the list 
    ** order depending on the MFTs state
    ** See http://msdn.microsoft.com/en-us/library/ms704814(v=VS.85).aspx
    *****************************************/

    HRESULT         hr      = S_OK;
    IMFMediaType*   pMT     = NULL;

    do
    {
        if(IsLocked() != FALSE)
        {
            hr = MF_E_TRANSFORM_ASYNC_LOCKED;
            break;
        }

        if(ppType == NULL)
        {
            hr = E_POINTER;
            break;
        }

        /*****************************************
        ** Todo: If your MFT supports more than one
        ** stream, make sure you modify
        ** MFT_MAX_STREAMS and adjust this function
        ** accordingly
        *****************************************/
        if(dwInputStreamID >= MFT_MAX_STREAMS)
        {
            hr = MF_E_INVALIDSTREAMNUMBER;
            break;
        }

        /*****************************************
        ** Todo: Modify the accepted input list
        ** g_ppguidInputTypes or use your own
        ** implementation of this function
        *****************************************/
        if(dwTypeIndex >= g_dwNumInputTypes)
        {
            hr = MF_E_NO_MORE_TYPES;
            break;
        }

        {
            CAutoLock lock(&m_csLock);

            hr = MFCreateMediaType(&pMT);
            if(FAILED(hr))
            {
                break;
            }

            hr = pMT->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
            if(FAILED(hr))
            {
                break;
            }

            hr = pMT->SetGUID(MF_MT_SUBTYPE, *(g_ppguidInputTypes[dwTypeIndex]));
            if(FAILED(hr))
            {
                break;
            }

            (*ppType) = pMT;
            (*ppType)->AddRef();
        }
    }while(false);

    SAFERELEASE(pMT);

    return hr;
}
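The static type list referenced by the Todo comments above (g_ppguidInputTypes / g_dwNumInputTypes) is not part of this listing. A hypothetical shape for it, assuming an H.264 decoder MFT; the actual GUIDs depend on what the transform accepts:

static const GUID* g_ppguidInputTypes[] =
{
    &MFVideoFormat_H264,   // hypothetical: replace with the subtypes your MFT accepts
};
static const DWORD g_dwNumInputTypes = ARRAYSIZE(g_ppguidInputTypes);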
Example #23
// Thread context: Dialog window thread
DecoderMF::DecoderMF()
	: m_outputSample(NULL),
	m_previewWindow(NULL),
	m_decoderThread(NULL),
	m_decoderThreadRunning(false),
	m_decoderThreadEvent(NULL)
{
	HRESULT				hr;
	IMFMediaType*		mediaType;

	m_previewConfigured = true;

	hr = CoCreateInstance(CLSID_CMSH264DecoderMFT, NULL, CLSCTX_INPROC_SERVER, IID_IMFTransform, (void**)&m_h264Decoder);

	if (FAILED(hr))
		throw 1;

	// Create and set input Media Type
	if (! CreateInputMediaType(&mediaType))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	hr = m_h264Decoder->SetInputType(0, mediaType, 0);
	mediaType->Release();
	if (FAILED(hr))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	// Set output type
	hr = m_h264Decoder->GetOutputAvailableType(0, 0, &mediaType);
	if (FAILED(hr))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	hr = m_h264Decoder->SetOutputType(0, mediaType, 0);
	mediaType->Release();
	if (FAILED(hr))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	// Some sanity checks
	DWORD numInputStreams, numOutputStreams;
	hr = m_h264Decoder->GetStreamCount(&numInputStreams, &numOutputStreams);
	if (FAILED(hr))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	if (numInputStreams != 1 || numOutputStreams != 1)
	{
		// This would be unexpected...
		m_h264Decoder->Release();
		throw 1;
	}

	MFT_OUTPUT_STREAM_INFO outputStreamInfo;
	hr = m_h264Decoder->GetOutputStreamInfo(0, &outputStreamInfo);
	if (FAILED(hr))
	{
		m_h264Decoder->Release();
		throw 1;
	}

	if ((outputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) ||
		!(outputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE) ||
		!(outputStreamInfo.dwFlags & MFT_OUTPUT_STREAM_WHOLE_SAMPLES))
	{
		// This would be unexpected...
		m_h264Decoder->Release();
		throw 1;
	}

	// Great - if we got here, it means Media Foundation is all OK. Let's
	// start the decoder thread.

	InitializeCriticalSection(&m_criticalSection);
	m_decoderThreadEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
	m_decoderThreadRunning = true;
	m_decoderThread = CreateThread(NULL, 0, DecoderThreadFunction, this, 0, NULL);
}
Example #24
TargetFileMediaFoundation::TargetFileMediaFoundation( const DataTargetRef &dataTarget, size_t sampleRate, size_t numChannels, SampleType sampleType, const std::string &extension )
	: TargetFile( dataTarget, sampleRate, numChannels, sampleType ), mStreamIndex( 0 )
{
	MediaFoundationInitializer::initMediaFoundation();

	::IMFSinkWriter *sinkWriter;
	HRESULT hr = ::MFCreateSinkWriterFromURL( dataTarget->getFilePath().wstring().c_str(), NULL, NULL, &sinkWriter );
	if( hr != S_OK ) {
		string errorString = string( "TargetFileMediaFoundation: Failed to create SinkWriter from URL: " ) +  dataTarget->getFilePath().string(); 
		if( hr == HRESULT_FROM_WIN32( ERROR_FILE_NOT_FOUND ) )
			errorString += ", file not found.";
		throw AudioFileExc( errorString );
	}

	mSinkWriter = ci::msw::makeComUnique( sinkWriter );

	mSampleSize = getBytesPerSample( mSampleType );
	const UINT32 bitsPerSample = 8 * mSampleSize;
	const WORD blockAlignment = mNumChannels * mSampleSize;
	const DWORD averageBytesPerSecond = mSampleRate * blockAlignment;

	// Set the output media type.

	IMFMediaType *mediaType;
	hr = ::MFCreateMediaType( &mediaType );
	CI_ASSERT( hr == S_OK );
	auto mediaTypeManaged = ci::msw::makeComUnique( mediaType );

	hr = mediaType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
	CI_ASSERT( hr == S_OK );

	const ::GUID audioFormat = getMfAudioFormat( mSampleType );
	hr = mediaType->SetGUID( MF_MT_SUBTYPE, audioFormat );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, (UINT32)mSampleRate );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, blockAlignment );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, averageBytesPerSecond );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, (UINT32)mNumChannels );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_ALL_SAMPLES_INDEPENDENT, 1 );
	CI_ASSERT( hr == S_OK );

	hr = mediaType->SetUINT32( MF_MT_FIXED_SIZE_SAMPLES, 1 );
	CI_ASSERT( hr == S_OK );

	hr = mSinkWriter->AddStream( mediaType, &mStreamIndex );
	CI_ASSERT( hr == S_OK );

	// Tell the sink writer to start accepting data.
	hr = mSinkWriter->BeginWriting();
	CI_ASSERT( hr == S_OK );
}
Example #25
/** Cobbled together from:
 http://msdn.microsoft.com/en-us/library/dd757929(v=vs.85).aspx
 and http://msdn.microsoft.com/en-us/library/dd317928(VS.85).aspx
 -- Albert
 If anything in here fails, just bail. I'm not going to decode HRESULTS.
 -- Bill
 */
bool SoundSourceMediaFoundation::configureAudioStream(const AudioSourceConfig& audioSrcCfg) {
    HRESULT hr;

    // deselect all streams, we only want the first
    hr = m_pSourceReader->SetStreamSelection(
            MF_SOURCE_READER_ALL_STREAMS, false);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to deselect all streams";
        return false;
    }

    hr = m_pSourceReader->SetStreamSelection(
            kStreamIndex, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to select first audio stream";
        return false;
    }

    IMFMediaType* pAudioType = nullptr;

    hr = m_pSourceReader->GetCurrentMediaType(
            kStreamIndex, &pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get current media type from stream";
        return false;
    }

    //------ Get bitrate from the file, before we change it to get uncompressed audio
    UINT32 avgBytesPerSecond;

    hr = pAudioType->GetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &avgBytesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "error getting MF_MT_AUDIO_AVG_BYTES_PER_SECOND";
        return false;
    }

    setBitrate( (avgBytesPerSecond * 8) / 1000);
    //------

    hr = pAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set major type to audio";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set subtype format to float";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set all samples independent";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set fixed size samples";
        safeRelease(&pAudioType);
        return false;
    }

    hr = pAudioType->SetUINT32(
            MF_MT_AUDIO_BITS_PER_SAMPLE, kBitsPerSample);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set bits per sample:"
                << kBitsPerSample;
        safeRelease(&pAudioType);
        return false;
    }

    const UINT sampleSize = kLeftoverSize * kBytesPerSample;
    hr = pAudioType->SetUINT32(
            MF_MT_SAMPLE_SIZE, sampleSize);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set sample size:"
                << sampleSize;
        safeRelease(&pAudioType);
        return false;
    }

    UINT32 numChannels;
    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get actual number of channels";
        safeRelease(&pAudioType);
        return false;
    } else {
        qDebug() << "Number of channels in input stream" << numChannels;
    }
    if (audioSrcCfg.hasValidChannelCount()) {
        numChannels = audioSrcCfg.getChannelCount();
        hr = pAudioType->SetUINT32(
                MF_MT_AUDIO_NUM_CHANNELS, numChannels);
        if (FAILED(hr)) {
            qWarning() << kLogPreamble << hr
                    << "failed to set number of channels:"
                    << numChannels;
            safeRelease(&pAudioType);
            return false;
        }
        qDebug() << "Requested number of channels" << numChannels;
    }

    UINT32 samplesPerSecond;
    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_SAMPLES_PER_SECOND, &samplesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get samples per second";
        safeRelease(&pAudioType);
        return false;
    } else {
        qDebug() << "Samples per second in input stream" << samplesPerSecond;
    }
    if (audioSrcCfg.hasValidSamplingRate()) {
        samplesPerSecond = audioSrcCfg.getSamplingRate();
        hr = pAudioType->SetUINT32(
                MF_MT_AUDIO_SAMPLES_PER_SECOND, samplesPerSecond);
        if (FAILED(hr)) {
            qWarning() << kLogPreamble << hr
                    << "failed to set samples per second:"
                    << samplesPerSecond;
            safeRelease(&pAudioType);
            return false;
        }
        qDebug() << "Requested samples per second" << samplesPerSecond;
    }

    // Set this type on the source reader. The source reader will
    // load the necessary decoder.
    hr = m_pSourceReader->SetCurrentMediaType(
            kStreamIndex, nullptr, pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to set media type";
        safeRelease(&pAudioType);
        return false;
    }

    // Finally release the reference before reusing the pointer
    safeRelease(&pAudioType);

    // Get the resulting output format.
    hr = m_pSourceReader->GetCurrentMediaType(
            kStreamIndex, &pAudioType);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to retrieve completed media type";
        return false;
    }

    // Ensure the stream is selected.
    hr = m_pSourceReader->SetStreamSelection(
            kStreamIndex, true);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to select first audio stream (again)";
        return false;
    }

    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_NUM_CHANNELS, &numChannels);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get actual number of channels";
        return false;
    }
    setChannelCount(numChannels);

    hr = pAudioType->GetUINT32(
            MF_MT_AUDIO_SAMPLES_PER_SECOND, &samplesPerSecond);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get the actual sample rate";
        return false;
    }
    setSamplingRate(samplesPerSecond);

    UINT32 leftoverBufferSizeInBytes = 0;
    hr = pAudioType->GetUINT32(MF_MT_SAMPLE_SIZE, &leftoverBufferSizeInBytes);
    if (FAILED(hr)) {
        qWarning() << kLogPreamble << hr
                << "failed to get sample buffer size (in bytes)";
        return false;
    }
    DEBUG_ASSERT((leftoverBufferSizeInBytes % kBytesPerSample) == 0);
    m_sampleBuffer.resetCapacity(leftoverBufferSizeInBytes / kBytesPerSample);
    DEBUG_ASSERT(m_sampleBuffer.getCapacity() > 0);
    qDebug() << kLogPreamble
            << "Sample buffer capacity"
            << m_sampleBuffer.getCapacity();

            
    // Finally release the reference
    safeRelease(&pAudioType);

    return true;
}
Example #26
/*
Adds the video and audio streams to the H.264 sink writer.
*/
HRESULT ConfigureEncoder(IMFMediaType *pVideoType, DWORD *videoStreamIndex, DWORD *audioStreamIndex, IMFSinkWriter *pWriter)
{
	IMFMediaType *pVideoOutType = NULL;
	IMFMediaType *pAudioOutType = NULL;

	// Configure the video stream.
	CHECK_HR(MFCreateMediaType(&pVideoOutType), L"Configure encoder failed to create media type for video output sink.");
	CHECK_HR(pVideoOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), L"Failed to set video writer attribute, media type.");
	CHECK_HR(pVideoOutType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264), L"Failed to set video writer attribute, video format (H.264).");
	CHECK_HR(pVideoOutType->SetUINT32(MF_MT_AVG_BITRATE, 240 * 1000), L"Failed to set video writer attribute, bit rate.");
	CHECK_HR(CopyAttribute(pVideoType, pVideoOutType, MF_MT_FRAME_SIZE), L"Failed to set video writer attribute, frame size.");
	CHECK_HR(CopyAttribute(pVideoType, pVideoOutType, MF_MT_FRAME_RATE), L"Failed to set video writer attribute, frame rate.");
	CHECK_HR(CopyAttribute(pVideoType, pVideoOutType, MF_MT_PIXEL_ASPECT_RATIO), L"Failed to set video writer attribute, aspect ratio.");
	CHECK_HR(CopyAttribute(pVideoType, pVideoOutType, MF_MT_INTERLACE_MODE), L"Failed to set video writer attribute, interlace mode.");
	CHECK_HR(pWriter->AddStream(pVideoOutType, videoStreamIndex), L"Failed to add the video stream to the sink writer.");
	pVideoOutType->Release();

	// Configure the audio stream.
	// See http://msdn.microsoft.com/en-us/library/windows/desktop/dd742785(v=vs.85).aspx for AAC encoder settings.
	// http://msdn.microsoft.com/en-us/library/ff819476%28VS.85%29.aspx
	CHECK_HR(MFCreateMediaType(&pAudioOutType), L"Configure encoder failed to create media type for audio output sink.");
	CHECK_HR(pAudioOutType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio), L"Failed to set audio writer attribute, media type.");
	CHECK_HR(pAudioOutType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_AAC), L"Failed to set audio writer attribute, audio format (AAC).");
	CHECK_HR(pAudioOutType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, 2), L"Failed to set audio writer attribute, number of channels.");
	CHECK_HR(pAudioOutType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 16), L"Failed to set audio writer attribute, bits per sample.");
	CHECK_HR(pAudioOutType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, 44100), L"Failed to set audio writer attribute, samples per second.");
	CHECK_HR(pAudioOutType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 16000), L"Failed to set audio writer attribute, average bytes per second.");
	//CHECK_HR( pAudioOutType->SetUINT32( MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, 0x29 ), L"Failed to set audio writer attribute, level indication.");
	CHECK_HR(pWriter->AddStream(pAudioOutType, audioStreamIndex), L"Failed to add the audio stream to the sink writer.");
	pAudioOutType->Release();

	return S_OK;
}
Example #27
HRESULT CHWMFT::GetOutputAvailableType(
    DWORD           dwOutputStreamID,
    DWORD           dwTypeIndex,
    IMFMediaType**  ppType)
{
    /*****************************************
    ** Todo: This function will return a media
    ** type at a given index. The SDK 
    ** implementation uses a static array of
    ** media types. Your MFT may want to use
    ** a dynamic array and modify the list 
    ** order depending on the MFTs state
    ** See http://msdn.microsoft.com/en-us/library/ms703812(v=VS.85).aspx
    *****************************************/

    HRESULT         hr      = S_OK;
    IMFMediaType*   pMT     = NULL;

    do
    {
        if(IsLocked() != FALSE)
        {
            hr = MF_E_TRANSFORM_ASYNC_LOCKED;
            break;
        }

        if(ppType == NULL)
        {
            hr = E_POINTER;
            break;
        }

        /*****************************************
        ** Todo: If your MFT supports more than one
        ** stream, make sure you modify
        ** MFT_MAX_STREAMS and adjust this function
        ** accordingly
        *****************************************/
        if(dwOutputStreamID >= MFT_MAX_STREAMS)
        {
            hr = MF_E_INVALIDSTREAMNUMBER;
            break;
        }

        /*****************************************
        ** Todo: Modify the accepted output list
        ** g_ppguidOutputTypes or use your own
        ** implementation of this function
        *****************************************/
        if(dwTypeIndex >= g_dwNumOutputTypes)
        {
            hr = MF_E_NO_MORE_TYPES;
            break;
        }

        {
            CAutoLock lock(&m_csLock);

            hr = MFCreateMediaType(&pMT);
            if(FAILED(hr))
            {
                break;
            }

            hr = pMT->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
            if(FAILED(hr))
            {
                break;
            }

            hr = pMT->SetGUID(MF_MT_SUBTYPE, *(g_ppguidOutputTypes[dwTypeIndex]));
            if(FAILED(hr))
            {
                break;
            }

            /*****************************************
            ** Todo: The following implementation
            ** forces a standard output resolution
            ** and framerate. Your MFT should set these
            ** values properly and update the Media
            ** Type as necessary after decoding the
            ** stream
            *****************************************/
            hr = MFSetAttributeSize(pMT, MF_MT_FRAME_SIZE, MFT_OUTPUT_WIDTH, MFT_OUTPUT_HEIGHT);
            if(FAILED(hr))
            {
                break;
            }

            hr = MFSetAttributeRatio(pMT, MF_MT_FRAME_RATE, MFT_FRAMERATE_NUMERATOR, MFT_FRAMERATE_DENOMINATOR);
            if(FAILED(hr))
            {
                break;
            }

            (*ppType) = pMT;
            (*ppType)->AddRef();
        }
    }while(false);

    SAFERELEASE(pMT);

    return hr;
}
Example #28
HRESULT CASFManager::SetupStreamDecoder (WORD wStreamNumber, 
                                         GUID* pguidCurrentMediaType)
{
    if (! m_pContentInfo)
    {
        return MF_E_NOT_INITIALIZED;
    }

    if (wStreamNumber == 0)
    {
        return E_INVALIDARG;
    }

    IMFASFProfile* pProfile = NULL;
    IMFMediaType* pMediaType = NULL;
    IMFASFStreamConfig *pStream = NULL;

    GUID    guidMajorType = GUID_NULL;
    GUID    guidSubType = GUID_NULL;
    GUID    guidDecoderCategory = GUID_NULL;

    BOOL fIsCompressed = TRUE;

    CLSID *pDecoderCLSIDs = NULL;   // Pointer to an array of CLSIDs.
    UINT32 cDecoderCLSIDs = 0;   // Size of the array.
    
    HRESULT hr = S_OK;

    //Get the profile object that stores stream information
    CHECK_HR(hr =  m_pContentInfo->GetProfile(&pProfile));

    //Get stream configuration object from the profile
    CHECK_HR(hr = pProfile->GetStreamByNumber(wStreamNumber, &pStream));

    //Get the media type
    CHECK_HR(hr = pStream->GetMediaType(&pMediaType));

    //Get the major media type
    CHECK_HR(hr = pMediaType->GetMajorType(&guidMajorType));
        
    //Get the sub media type
    CHECK_HR(hr = pMediaType->GetGUID(MF_MT_SUBTYPE, &guidSubType));
    
    //find out if the media type is compressed
    CHECK_HR(hr = pMediaType->IsCompressedFormat(&fIsCompressed));

    if (fIsCompressed)
    {
        //get decoder category
        if (guidMajorType == MFMediaType_Video)
        {
            guidDecoderCategory = MFT_CATEGORY_VIDEO_DECODER;
        }
        else if (guidMajorType == MFMediaType_Audio)
        {
            guidDecoderCategory = MFT_CATEGORY_AUDIO_DECODER;
        }
        else
        {
            CHECK_HR(hr = MF_E_INVALIDMEDIATYPE);
        }

        // Look for a decoder.
        MFT_REGISTER_TYPE_INFO tinfo;
        tinfo.guidMajorType = guidMajorType;
        tinfo.guidSubtype = guidSubType;

        CHECK_HR(hr = MFTEnum(
            guidDecoderCategory,
            0,                  // Reserved
            &tinfo,             // Input type to match. (Encoded type.)
            NULL,               // Output type to match. (Don't care.)
            NULL,               // Attributes to match. (None.)
            &pDecoderCLSIDs,    // Receives a pointer to an array of CLSIDs.
            &cDecoderCLSIDs     // Receives the size of the array.
            ));

        // MFTEnum can return zero matches.
        if (cDecoderCLSIDs == 0)
        {
            hr = MF_E_TOPO_CODEC_NOT_FOUND;
        }
        else
        {
            //if the CDecoder instance does not exist, create one.
            if (!m_pDecoder)
            {
                CHECK_HR(hr = CDecoder::CreateInstance(&m_pDecoder));
            }
            
            //Load the first MFT in the array for the current media type
            CHECK_HR(hr = m_pDecoder->Initialize(pDecoderCLSIDs[0], pMediaType));
        }
        *pguidCurrentMediaType = guidMajorType;
    }
    else
    {
        // Not compressed. Don't need a decoder. 
         CHECK_HR(hr = MF_E_INVALIDREQUEST);
    }


    TRACE((L"Stream decoder loaded.\n"));

done:

    LOG_MSG_IF_FAILED(L"CASFManager::SetupStreamDecoder failed.\n", hr);
    
    SAFE_RELEASE(pProfile);
    SAFE_RELEASE(pMediaType);
    SAFE_RELEASE(pStream);

    CoTaskMemFree(pDecoderCLSIDs);

    return hr;
}
Example #29
unsigned char *BBWin8Game::LoadAudioData( String path,int *length,int *channels,int *format,int *hertz ){

	String url=PathToFilePath( path );
	
	DXASS( MFStartup( MF_VERSION ) );
	
	IMFAttributes *attrs;
	DXASS( MFCreateAttributes( &attrs,1 ) );
	DXASS( attrs->SetUINT32( MF_LOW_LATENCY,TRUE ) );
	
	IMFSourceReader *reader;
	DXASS( MFCreateSourceReaderFromURL( url.ToCString<wchar_t>(),attrs,&reader ) );
	
	attrs->Release();

	IMFMediaType *mediaType;
	DXASS( MFCreateMediaType( &mediaType ) );
	DXASS( mediaType->SetGUID( MF_MT_MAJOR_TYPE,MFMediaType_Audio ) );
	DXASS( mediaType->SetGUID( MF_MT_SUBTYPE,MFAudioFormat_PCM ) );

	DXASS( reader->SetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,mediaType ) );
    
	mediaType->Release();

	IMFMediaType *outputMediaType;
	DXASS( reader->GetCurrentMediaType( MF_SOURCE_READER_FIRST_AUDIO_STREAM,&outputMediaType ) );
	
	WAVEFORMATEX *wformat;
	uint32 formatByteCount=0;
	DXASS( MFCreateWaveFormatExFromMFMediaType( outputMediaType,&wformat,&formatByteCount ) );

	*channels=wformat->nChannels;
	*format=wformat->wBitsPerSample/8;
	*hertz=wformat->nSamplesPerSec;

	CoTaskMemFree( wformat );
    
	outputMediaType->Release();
/*    
	PROPVARIANT var;
	DXASS( reader->GetPresentationAttribute( MF_SOURCE_READER_MEDIASOURCE,MF_PD_DURATION,&var ) );
	LONGLONG duration=var.uhVal.QuadPart;
	float64 durationInSeconds=(duration / (float64)(10000 * 1000));
	m_maxStreamLengthInBytes=(uint32)( durationInSeconds * m_waveFormat.nAvgBytesPerSec );
*/
	std::vector<unsigned char*> bufs;
	std::vector<uint32> lens;
	uint32 len=0;
    
	for( ;; ){
		uint32 flags=0;
		IMFSample *sample;
		DXASS( reader->ReadSample( MF_SOURCE_READER_FIRST_AUDIO_STREAM,0,0,reinterpret_cast<DWORD*>(&flags),0,&sample ) );
		
		if( flags & MF_SOURCE_READERF_ENDOFSTREAM ){
			break;
		}
		if( sample==0 ){ 
			abort();
		}
		
		IMFMediaBuffer *mediaBuffer;
		DXASS( sample->ConvertToContiguousBuffer( &mediaBuffer ) );

		uint8 *audioData=0;
		uint32 sampleBufferLength=0;
		DXASS( mediaBuffer->Lock( &audioData,0,reinterpret_cast<DWORD*>( &sampleBufferLength ) ) );
		
		unsigned char *buf=(unsigned char*)malloc( sampleBufferLength );
		memcpy( buf,audioData,sampleBufferLength );
		
		bufs.push_back( buf );
		lens.push_back( sampleBufferLength );
		len+=sampleBufferLength;
		
		DXASS( mediaBuffer->Unlock() );
		mediaBuffer->Release();
		
		sample->Release();
	}
	
	reader->Release();
	
	*length=len/(*channels * *format);

	unsigned char *data=(unsigned char*)malloc( len );
	unsigned char *p=data;
	
	for( int i=0;i<bufs.size();++i ){
		memcpy( p,bufs[i],lens[i] );
		free( bufs[i] );
		p+=lens[i];
	}
	
	gc_force_sweep=true;
	
	return data;
}	
Example #30
HRESULT ConfigureSourceReader(IMFSourceReader *pReader)
{
    // The list of acceptable types.
    GUID subtypes[] = { 
        MFVideoFormat_NV12, MFVideoFormat_YUY2, MFVideoFormat_UYVY,
        MFVideoFormat_RGB32, MFVideoFormat_RGB24, MFVideoFormat_IYUV
    };

    HRESULT hr = S_OK;
    BOOL    bUseNativeType = FALSE;

    GUID subtype = { 0 };

    IMFMediaType *pType = NULL;

    // If the source's native format matches any of the formats in 
    // the list, prefer the native format.

    // Note: The camera might support multiple output formats, 
    // including a range of frame dimensions. The application could
    // provide a list to the user and have the user select the
    // camera's output format. That is outside the scope of this
    // sample, however.

    hr = pReader->GetNativeMediaType(
        (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
        0,  // Type index
        &pType
        );

    if (FAILED(hr)) { goto done; }

    hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);

    if (FAILED(hr)) { goto done; }

    for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
    {
        if (subtype == subtypes[i])
        {
            hr = pReader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
                NULL, 
                pType
                );

            bUseNativeType = TRUE;
            break;
        }
    }

    if (!bUseNativeType)
    {
        // None of the native types worked. The camera might offer 
        // output a compressed type such as MJPEG or DV.

        // Try adding a decoder.

        for (UINT32 i = 0; i < ARRAYSIZE(subtypes); i++)
        {
            hr = pType->SetGUID(MF_MT_SUBTYPE, subtypes[i]);

            if (FAILED(hr)) { goto done; }

            hr = pReader->SetCurrentMediaType(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 
                NULL, 
                pType
                );

            if (SUCCEEDED(hr))
            {
                break;
            }
        }
    }

done:
    SafeRelease(&pType);
    return hr;
}